diff --git a/.ci/azure/linux_arm64.yml b/.ci/azure/linux_arm64.yml index 6d9b68f1045..8197cf1a8da 100644 --- a/.ci/azure/linux_arm64.yml +++ b/.ci/azure/linux_arm64.yml @@ -26,16 +26,28 @@ jobs: system.debug: true VSTS_HTTP_RETRY: 5 VSTS_HTTP_TIMEOUT: 200 + PYTHON_ARM_VERSION: "3.8.12" + PYTHON_EXEC: "python3.8" + OPENVINO_ARCH: 'aarch64' + NUM_PROC: 1 BUILD_TYPE: Release OPENVINO_REPO_DIR: $(Build.Repository.LocalPath) OPENVINO_CONTRIB_REPO_DIR: $(OPENVINO_REPO_DIR)/../openvino_contrib OPENCV_REPO_DIR: $(OPENVINO_REPO_DIR)/../opencv + BUILD_PYTHON: $(WORK_DIR)/build_python + BUILD_OPENCV: $(WORK_DIR)/build_opencv + BUILD_OPENVINO: $(WORK_DIR)/build + BUILD_OPENVINO_PYTHON: $(WORK_DIR)/build_python + BUILD_OPEN_MODEL_ZOO: $(WORK_DIR)/build_open_model_zoo + INSTALL_OPENVINO: $(WORK_DIR)/install_openvino + INSTALL_PYTHON: $(INSTALL_OPENVINO)/extras/python + INSTALL_OPENCV: $(INSTALL_OPENVINO)/extras/opencv + INSTALL_OPEN_MODEL_ZOO: $(INSTALL_OPENVINO)/extras/open_model_zoo WORK_DIR: $(Pipeline.Workspace)/_w - BUILD_DIR: $(WORK_DIR)/build - BUILD_DIR_OPENCV: $(WORK_DIR)/build_opencv - TMP_DIR: /mnt/tmp SHARE_DIR: /mount/cinfsshare/onnxtestdata - CCACHE_DIR: $(SHARE_DIR)/ccache/master/linux_arm64 + TMP_DIR: /mnt/tmp + OPENVINO_CCACHE_DIR: $(SHARE_DIR)/ccache/master/linux_arm64 + OPENCV_CCACHE_DIR: $(SHARE_DIR)/ccache/master/linux_arm64_opencv steps: - script: | @@ -56,17 +68,21 @@ jobs: df lsblk -o NAME,HCTL,SIZE,MOUNTPOINT | grep -i "sd" free -h + echo "##vso[task.setvariable variable=NUM_PROC]$(nproc --all)" + echo "NUM_PROC=$(NUM_PROC)" displayName: 'System information' - script: | rm -rf $(WORK_DIR) ; mkdir $(WORK_DIR) - rm -rf $(BUILD_DIR) ; mkdir $(BUILD_DIR) + mkdir -p $(BUILD_OPENCV) $(BUILD_OPENVINO) $(BUILD_OPENVINO_PYTHON) $(BUILD_PYTHON) $(BUILD_OPEN_MODEL_ZOO) + mkdir -p $(INSTALL_OPENVINO) $(INSTALL_PYTHON) $(INSTALL_OPENCV) $(INSTALL_OPEN_MODEL_ZOO) sudo rm -rf $(TMP_DIR) ; sudo mkdir $(TMP_DIR) ; sudo chmod 777 -R $(TMP_DIR) sudo mkdir -p $(SHARE_DIR) 
sudo apt --assume-yes update && sudo apt --assume-yes install nfs-common sudo mount -vvv -t nfs cinfsshare.file.core.windows.net:/cinfsshare/onnxtestdata $(SHARE_DIR) -o vers=4,minorversion=1,sec=sys - mkdir -p $(CCACHE_DIR) - displayName: 'Make directory' + mkdir -p $(OPENVINO_CCACHE_DIR) + mkdir -p $(OPENCV_CCACHE_DIR) + displayName: 'Make directories' - checkout: self clean: true @@ -83,16 +99,25 @@ jobs: - script: | set -e $(OPENVINO_REPO_DIR)/install_build_dependencies.sh - # Move into contrib install_build_dependencies.sh - sudo apt --assume-yes install scons crossbuild-essential-arm64 libprotoc-dev protobuf-compiler - # OpenCV should provide install_build_dependencies.sh as well - # Move into resources - git clone https://github.com/opencv/opencv.git --depth 1 $(OPENCV_REPO_DIR) - # Speed up build - wget https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-linux.zip - unzip ninja-linux.zip - sudo cp -v ninja /usr/local/bin/ - workingDirectory: $(WORK_DIR) + export CCACHE_DIR=$(OPENCV_CCACHE_DIR) + export CCACHE_TEMPDIR=$(TMP_DIR)/ccache + export CCACHE_BASEDIR=$(Pipeline.Workspace) + export CCACHE_MAXSIZE=50G + export USE_CCACHE=1 + export PYTHON_ARM_VERSION=$(PYTHON_ARM_VERSION) + export NUM_PROC=$(NUM_PROC) + export BUILD_PYTHON=$(BUILD_PYTHON) + export WORK_DIR=$(WORK_DIR) + export INSTALL_PYTHON=$(INSTALL_PYTHON) + export BUILD_TYPE=$(BUILD_TYPE) + export OPENVINO_REPO_DIR=$(OPENVINO_REPO_DIR) + export INSTALL_OPENCV=$(INSTALL_OPENCV) + export PYTHON_EXEC=$(PYTHON_EXEC) + export OPENCV_REPO_DIR=$(OPENCV_REPO_DIR) + export BUILD_OPENCV=$(BUILD_OPENCV) + export INSTALL_OPENVINO=$(INSTALL_OPENVINO) + $(OPENVINO_CONTRIB_REPO_DIR)/modules/arm_plugin/scripts/install_build_dependencies.sh + workingDirectory: $(BUILD_OPENVINO) displayName: 'Install dependencies' - task: CMake@1 @@ -100,30 +125,21 @@ jobs: cmakeArgs: > -GNinja -DVERBOSE_BUILD=ON + -DOpenCV_DIR=$(INSTALL_OPENCV)/cmake + -DENABLE_OPENCV=OFF + 
-DPYTHON_INCLUDE_DIRS=$(INSTALL_PYTHON)/include/python3.8 + -DPYTHON_LIBRARY=$(INSTALL_PYTHON)/lib/libpython3.8.so + -DENABLE_PYTHON=ON + -DPYTHON_MODULE_EXTENSION=".so" + -DENABLE_TESTS=ON + -DENABLE_FUNCTIONAL_TESTS=ON + -DENABLE_GAPI_TESTS=OFF + -DENABLE_GAPI_PREPROCESSING=OFF + -DENABLE_DATA=OFF + -DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath-link,$(INSTALL_OPENCV)/lib + -DTHREADING=SEQ -DENABLE_LTO=ON + -DCMAKE_TOOLCHAIN_FILE=$(OPENVINO_REPO_DIR)/cmake/arm64.toolchain.cmake -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) - -DBUILD_LIST=imgcodecs,videoio,highgui - -DCMAKE_TOOLCHAIN_FILE=$(OPENCV_REPO_DIR)/platforms/linux/aarch64-gnu.toolchain.cmake - $(OPENCV_REPO_DIR) - workingDirectory: $(BUILD_DIR_OPENCV) - - - script: ninja - workingDirectory: $(BUILD_DIR_OPENCV) - displayName: 'Build OpenCV Linux ARM64' - - - script: ninja install - workingDirectory: $(BUILD_DIR_OPENCV) - displayName: 'Install OpenCV Linux ARM64' - - - task: CMake@1 - inputs: - cmakeArgs: > - -GNinja - -DVERBOSE_BUILD=ON - -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) - -DCMAKE_TOOLCHAIN_FILE=$(OPENVINO_REPO_DIR)/cmake/arm64.toolchain.cmake - -DOpenCV_DIR=$(BUILD_DIR_OPENCV)/install/lib/cmake/opencv4 - -DENABLE_OPENCV=OFF - -DENABLE_TESTS=ON -DENABLE_SAMPLES=ON -DBUILD_java_api=OFF -DENABLE_INTEL_MYRIAD=OFF @@ -131,26 +147,102 @@ jobs: -DIE_EXTRA_MODULES=$(OPENVINO_CONTRIB_REPO_DIR)/modules -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_COMPILER_LAUNCHER=ccache + -DARM_COMPUTE_SCONS_JOBS=$(NUM_PROC) + -DOUTPUT_ROOT=$(INSTALL_OPENVINO) + -DCMAKE_INSTALL_PREFIX=$(INSTALL_OPENVINO) $(OPENVINO_REPO_DIR) - workingDirectory: $(BUILD_DIR) - - - script: ls -alR $(OPENVINO_REPO_DIR)/temp/ - displayName: 'List temp SDKs' - - - script: ccache --zero-stats --max-size=50G --show-config - displayName: 'Clean ccache stats' + workingDirectory: $(BUILD_OPENVINO) + displayName: 'CMake OpenVINO ARM plugin' - script: | - export CCACHE_DIR=$(CCACHE_DIR) + export CCACHE_DIR=$(OPENVINO_CCACHE_DIR) export CCACHE_TEMPDIR=$(TMP_DIR)/ccache export 
CCACHE_BASEDIR=$(Pipeline.Workspace) export CCACHE_MAXSIZE=50G + export USE_CCACHE=1 ninja - workingDirectory: $(BUILD_DIR) - displayName: 'Build Linux ARM64' + workingDirectory: $(BUILD_OPENVINO) + displayName: 'Build OpenVINO ARM plugin' - - script: ccache --show-stats - displayName: 'Show ccache stats' + - script: ninja install + workingDirectory: $(BUILD_OPENVINO) + displayName: 'Install OpenVINO ARM plugin' - - script: ls -alR $(OPENVINO_REPO_DIR)/bin/ - displayName: 'List binary files' + - task: CMake@1 + inputs: + cmakeArgs: > + -GNinja + -DInferenceEngineDeveloperPackage_DIR=$(BUILD_OPENVINO) + -DENABLE_PYTHON=ON + -DPYTHON_EXECUTABLE=$(INSTALL_PYTHON)/bin/python3.8 + -DPYTHON_INCLUDE_DIRS=$(INSTALL_PYTHON)/include/python3.8 + -DPYTHON_LIBRARIES=$(INSTALL_PYTHON)/lib + -DPYTHON3_NUMPY_INCLUDE_DIRS=/usr/local/lib/python3.8/site-packages/numpy/core/include + -DPYTHON_MODULE_EXTENSION=".so" + -DPYBIND11_FINDPYTHON=OFF + -DPYBIND11_NOPYTHON=OFF + -DPYTHONLIBS_FOUND=TRUE + -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) + -DENABLE_DATA=OFF + -DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath-link,$(INSTALL_OPENCV)/lib + -DCMAKE_TOOLCHAIN_FILE=$(OPENVINO_REPO_DIR)/cmake/arm64.toolchain.cmake + -DCMAKE_CXX_COMPILER_LAUNCHER=ccache + -DCMAKE_C_COMPILER_LAUNCHER=ccache + -DCMAKE_INSTALL_PREFIX=$(INSTALL_OPENVINO) + $(OPENVINO_REPO_DIR)/src/bindings/python + workingDirectory: $(BUILD_OPENVINO_PYTHON) + displayName: 'CMake OpenVINO python binding' + + - script: | + export CCACHE_DIR=$(OPENVINO_CCACHE_DIR) + export CCACHE_TEMPDIR=$(TMP_DIR)/ccache + export CCACHE_BASEDIR=$(Pipeline.Workspace) + export CCACHE_MAXSIZE=50G + export USE_CCACHE=1 + ninja + workingDirectory: $(BUILD_OPENVINO_PYTHON) + displayName: 'Build OpenVINO python binding' + + - script: ninja install + workingDirectory: $(BUILD_OPENVINO_PYTHON) + displayName: 'Install OpenVINO python binding' + + - task: CMake@1 + inputs: + cmakeArgs: > + -GNinja + -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) + -DENABLE_PYTHON=ON + 
-DPYTHON_EXECUTABLE=/usr/local/bin/python3.8 + -DPYTHON_INCLUDE_DIR=$(INSTALL_PYTHON)/include/python3.8 + -DPYTHON_LIBRARY=$(INSTALL_PYTHON)/lib + -DCMAKE_TOOLCHAIN_FILE=$(OPENVINO_REPO_DIR)/cmake/arm64.toolchain.cmake + -DOpenVINO_DIR=$(BUILD_OPENVINO) + -DInferenceEngine_DIR=$(BUILD_OPENVINO) + -DOpenCV_DIR=$(INSTALL_OPENCV)/cmake + -Dngraph_DIR=$(BUILD_OPENVINO) + -DIE_EXTRA_MODULES=$(OPENVINO_CONTRIB_REPO_DIR)/modules + -DCMAKE_INSTALL_PREFIX=$(INSTALL_OPEN_MODEL_ZOO) + $(OPENVINO_REPO_DIR)/thirdparty/open_model_zoo/demos + workingDirectory: $(BUILD_OPEN_MODEL_ZOO) + displayName: 'CMake Open Model Zoo demos' + + - script: ninja + workingDirectory: $(BUILD_OPEN_MODEL_ZOO) + displayName: 'Build Open Model Zoo demos' + + - script: ninja install + workingDirectory: $(BUILD_OPEN_MODEL_ZOO) + displayName: 'Install Open Model Zoo demos' + + - script: | + cp -r $(BUILD_OPEN_MODEL_ZOO)/$(OPENVINO_ARCH)/$(BUILD_TYPE)/* $(INSTALL_OPEN_MODEL_ZOO)/ + zip -9 -r $(Build.ArtifactStagingDirectory)/openvino_$(OPENVINO_ARCH)_linux.zip ./* + workingDirectory: $(INSTALL_OPENVINO) + displayName: 'Create OpenVINO ARM64 linux package' + + - task: PublishBuildArtifacts@1 + inputs: + pathToPublish: $(Build.ArtifactStagingDirectory) + artifactName: 'openvino_aarch64_linux' + displayName: 'Publish OpenVINO AArch64 linux package' diff --git a/.ci/azure/windows.yml b/.ci/azure/windows.yml index e30d7683299..70069298a7d 100644 --- a/.ci/azure/windows.yml +++ b/.ci/azure/windows.yml @@ -30,7 +30,7 @@ jobs: maxParallel: 2 # About 150% of total time - timeoutInMinutes: 150 + timeoutInMinutes: 180 pool: name: WIN_VMSS_VENV_D8S_WU2 @@ -133,7 +133,7 @@ jobs: - script: | set PATH=$(WORK_DIR)\ninja-win;%PATH% - call "$(MSVS_VARS_PATH)" && $(CMAKE_CMD) -G "Ninja Multi-Config" -DENABLE_WHEEL=ON -DENABLE_ONEDNN_FOR_GPU=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_GAPI_PREPROCESSING=$(CMAKE_BUILD_SHARED_LIBS) -DBUILD_SHARED_LIBS=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_REQUIREMENTS_INSTALL=OFF 
-DENABLE_FASTER_BUILD=ON -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) -DENABLE_TESTS=ON -DENABLE_STRICT_DEPENDENCIES=OFF -DENABLE_PYTHON=ON -DPYTHON_EXECUTABLE="C:\hostedtoolcache\windows\Python\3.7.6\x64\python.exe" -DPYTHON_INCLUDE_DIR="C:\hostedtoolcache\windows\Python\3.7.6\x64\include" -DPYTHON_LIBRARY="C:\hostedtoolcache\windows\Python\3.7.6\x64\libs\python37.lib" -DIE_EXTRA_MODULES=$(OPENVINO_CONTRIB_REPO_DIR)\modules -DCMAKE_C_COMPILER:PATH="$(MSVC_COMPILER_PATH)" -DCMAKE_CXX_COMPILER:PATH="$(MSVC_COMPILER_PATH)" $(REPO_DIR) + call "$(MSVS_VARS_PATH)" && $(CMAKE_CMD) -G "Ninja Multi-Config" -DENABLE_WHEEL=ON -DENABLE_ONEDNN_FOR_GPU=$(CMAKE_BUILD_SHARED_LIBS) -DBUILD_SHARED_LIBS=$(CMAKE_BUILD_SHARED_LIBS) -DENABLE_REQUIREMENTS_INSTALL=OFF -DENABLE_FASTER_BUILD=ON -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) -DENABLE_TESTS=ON -DENABLE_STRICT_DEPENDENCIES=OFF -DENABLE_PYTHON=ON -DPYTHON_EXECUTABLE="C:\hostedtoolcache\windows\Python\3.7.6\x64\python.exe" -DPYTHON_INCLUDE_DIR="C:\hostedtoolcache\windows\Python\3.7.6\x64\include" -DPYTHON_LIBRARY="C:\hostedtoolcache\windows\Python\3.7.6\x64\libs\python37.lib" -DIE_EXTRA_MODULES=$(OPENVINO_CONTRIB_REPO_DIR)\modules -DCMAKE_C_COMPILER:PATH="$(MSVC_COMPILER_PATH)" -DCMAKE_CXX_COMPILER:PATH="$(MSVC_COMPILER_PATH)" $(REPO_DIR) workingDirectory: $(BUILD_DIR) displayName: 'CMake' @@ -167,13 +167,6 @@ jobs: workingDirectory: $(BUILD_SAMPLES_TESTS_DIR) displayName: 'Install Samples Tests' - - script: $(CMAKE_CMD) -DCMAKE_INSTALL_PREFIX=$(INSTALL_DIR) -DCOMPONENT=tests -P cmake_install.cmake && xcopy $(REPO_DIR)\temp\opencv_4.5.2\opencv\* $(INSTALL_DIR)\opencv\ /e /h /y - workingDirectory: $(BUILD_DIR) - displayName: 'Install tests' - - - script: dir $(INSTALL_DIR) /s - displayName: 'List install files' - - script: $(INSTALL_DIR)\samples\cpp\build_samples_msvc.bat -i $(INSTALL_DIR) workingDirectory: $(BUILD_SAMPLES_DIR) displayName: 'Build cpp samples' @@ -198,9 +191,15 @@ jobs: python -m pytest $(INSTALL_DIR)\tests\smoke_tests\ --env_conf 
$(INSTALL_DIR)\tests\smoke_tests\env_config.yml -s --junitxml=TEST-SamplesSmokeTests.xml workingDirectory: $(INSTALL_DIR) displayName: 'Samples Smoke Tests' - condition: eq(variables['CMAKE_BUILD_SHARED_LIBS'], 'ON') continueOnError: false + - script: $(CMAKE_CMD) -DCMAKE_INSTALL_PREFIX=$(INSTALL_DIR) -DCOMPONENT=tests -P cmake_install.cmake && xcopy $(REPO_DIR)\temp\opencv_4.5.2\opencv\* $(INSTALL_DIR)\opencv\ /e /h /y + workingDirectory: $(BUILD_DIR) + displayName: 'Install tests' + + - script: dir $(INSTALL_DIR) /s + displayName: 'List install files' + - script: rd /Q /S $(BUILD_DIR) displayName: 'Clean build dir' continueOnError: false diff --git a/.github/github_org_control/config.json b/.github/github_org_control/config.json index 66c7de60085..a0ddd443d9a 100644 --- a/.github/github_org_control/config.json +++ b/.github/github_org_control/config.json @@ -5,11 +5,10 @@ "IGNORE_LOGINS": [ "openvino-ci", "openvino-pushbot", - "lab-nerval", - "lab-nerval-onnx-ci", - "onnx-watchdog-agent", "workbench-ci-bot", - "openvino-pot-ci" + "openvino-pot-ci", + "sysicvvpux", + "ote-ci-bot" ], "MAX_MEMBERS_TO_REMOVE": 15, "EMAILS_FILE_PATH": "dev_emails-test.txt", @@ -28,7 +27,7 @@ "openvino-ie-gna-maintainers": "category: GNA", "openvino-ie-gpu-maintainers": "category: GPU", "openvino-ie-lpt-maintainers": "category: LP transformations", - "openvino-ie-multi-maintainers": "category: MULTI", + "openvino-ie-auto-multi-maintainers": "category: MULTI", "openvino-ie-python-api-maintainers": "category: python api", "openvino-ie-template-maintainers": "category: TEMPLATE", "openvino-ie-tests-maintainers": "category: IE Tests", diff --git a/.github/github_org_control/github_api.py b/.github/github_org_control/github_api.py index bc90f19d547..6f9d14c5376 100644 --- a/.github/github_org_control/github_api.py +++ b/.github/github_org_control/github_api.py @@ -157,7 +157,7 @@ class GithubOrgApi: self.github_users_by_email[email] = org_member if not is_valid_name(org_member.name): 
self.members_to_fix_name.add(org_member) - elif not is_user_ignored(org_member): + else: self.members_to_remove.add(org_member) print("\nOrg members - no Intel emails:") diff --git a/CMakeLists.txt b/CMakeLists.txt index 8a3ae0134db..fb9dc63925e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -13,6 +13,10 @@ endif() project(OpenVINO DESCRIPTION "OpenVINO toolkit") +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE "Release" CACHE STRING "CMake build type" FORCE) +endif() + set(IE_MAIN_SOURCE_DIR ${OpenVINO_SOURCE_DIR}/inference-engine) find_package(IEDevScripts REQUIRED @@ -34,7 +38,9 @@ endif() message (STATUS "PROJECT ............................... " ${PROJECT_NAME}) message (STATUS "CMAKE_VERSION ......................... " ${CMAKE_VERSION}) message (STATUS "CMAKE_BINARY_DIR ...................... " ${CMAKE_BINARY_DIR}) +message (STATUS "CMAKE_SOURCE_DIR ...................... " ${CMAKE_SOURCE_DIR}) message (STATUS "OpenVINO_SOURCE_DIR ................... " ${OpenVINO_SOURCE_DIR}) +message (STATUS "OpenVINO_BINARY_DIR ................... " ${OpenVINO_BINARY_DIR}) message (STATUS "CMAKE_GENERATOR ....................... " ${CMAKE_GENERATOR}) message (STATUS "CMAKE_C_COMPILER_ID ................... " ${CMAKE_C_COMPILER_ID}) message (STATUS "CMAKE_CXX_COMPILER_ID ................. " ${CMAKE_CXX_COMPILER_ID}) @@ -42,7 +48,7 @@ message (STATUS "CMAKE_BUILD_TYPE ...................... " ${CMAKE_BUILD_TYPE}) message (STATUS "CMAKE_TOOLCHAIN_FILE .................. 
" ${CMAKE_TOOLCHAIN_FILE}) # remove file with exported developer targets to force its regeneration -file(REMOVE "${CMAKE_BINARY_DIR}/ngraph/ngraphTargets.cmake") +file(REMOVE "${CMAKE_BINARY_DIR}/ngraphTargets.cmake") file(REMOVE "${CMAKE_BINARY_DIR}/InferenceEngineTargets.cmake") file(REMOVE "${CMAKE_BINARY_DIR}/OpenVINOTargets.cmake") foreach(component IN LISTS openvino_export_components) diff --git a/CODEOWNERS b/CODEOWNERS index e1814bf2bd6..565113ff9bc 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -39,14 +39,19 @@ Jenkinsfile @openvinotoolkit/openvino-admins # IE CPU: /src/plugins/intel_cpu/ @openvinotoolkit/openvino-ie-cpu-maintainers @openvinotoolkit/openvino-ie-cpu-developers -/src/common/low_precision_transformations/ @openvinotoolkit/openvino-ie-cpu-maintainers @openvinotoolkit/openvino-ie-cpu-developers /src/plugins/intel_cpu/thirdparty/mkl-dnn/ @openvinotoolkit/openvino-ie-cpu-maintainers @openvinotoolkit/openvino-ie-cpu-developers +#IE LPT +/src/common/low_precision_transformations/ @openvinotoolkit/openvino-ie-lpt-maintainers + # IE GPU: /src/inference/include/ie/gpu/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers /src/inference/include/ie/cldnn/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers /src/inference/include/openvino/runtime/intel_gpu/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers /src/plugins/intel_gpu/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers +/docs/snippets/gpu/ @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers +/docs/OV_Runtime_UG/supported_plugins/GPU.md @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers +/docs/OV_Runtime_UG/supported_plugins/GPU_RemoteTensor_API.md @openvinotoolkit/openvino-ie-gpu-maintainers @openvinotoolkit/openvino-ie-gpu-developers # IE VPU: /src/plugins/intel_myriad 
@openvinotoolkit/openvino-ie-vpu-maintainers @@ -63,6 +68,9 @@ Jenkinsfile @openvinotoolkit/openvino-admins /src/plugins/intel_gna/ @openvinotoolkit/openvino-ie-gna-maintainers /src/inference/include/ie/gna/ @openvinotoolkit/openvino-ie-gna-maintainers +# IE ARM CPU: +/docs/OV_Runtime_UG/supported_plugins/ARM_CPU.md @openvinotoolkit/openvino_contrib-arm_plugin-maintainers + # IE Auto (MULTI) plugin: /src/plugins/auto/ @openvinotoolkit/openvino-ie-auto-multi-maintainers /src/inference/include/ie/multi-device/ @openvinotoolkit/openvino-ie-auto-multi-maintainers @@ -71,8 +79,8 @@ Jenkinsfile @openvinotoolkit/openvino-admins /src/frontends/paddle/ @openvinotoolkit/openvino-ie-paddle-maintainers # IE Tests: -/src/tests/ @openvinotoolkit/openvino-ie-tests-maintainers -/src/tests_deprecated/ @openvinotoolkit/openvino-ie-tests-maintainers +/src/tests/ @openvinotoolkit/openvino-ie-tests-maintainers @openvinotoolkit/openvino-ie-test-developers +/src/tests_deprecated/ @openvinotoolkit/openvino-ie-tests-maintainers @openvinotoolkit/openvino-ie-test-developers /src/tests/functional/inference_engine/ngraph_reader/ @openvinotoolkit/openvino-ie-tests-maintainers @openvinotoolkit/openvino-ngraph-maintainers /src/tests/functional/inference_engine/transformations/ @openvinotoolkit/openvino-ie-tests-maintainers @openvinotoolkit/openvino-ngraph-maintainers @@ -82,6 +90,6 @@ Jenkinsfile @openvinotoolkit/openvino-admins *.md @openvinotoolkit/openvino-docs-maintainers # Control 3d party dependencies -**/*requirements*.* @openvino-configuration-mgmt -**/setup.py @openvino-configuration-mgmt -/scripts/install_dependencies/ @openvino-configuration-mgmt +**/*requirements*.* @openvinotoolkit/openvino-configuration-mgmt +**/setup.py @openvinotoolkit/openvino-configuration-mgmt +/scripts/install_dependencies/ @openvinotoolkit/openvino-configuration-mgmt diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000000..45a41c374ee --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 
+1,68 @@ +# How to contribute to the OpenVINO repository + +We suppose that you are an enthusiastic coder and want to contribute some code. For that purpose, the OpenVINO project now has a repository on GitHub, to simplify everybody's life! All the bug fixes, new functionality, new tutorials etc. should be submitted via GitHub's mechanism of pull requests. + +If you are not familiar with the mechanism - do not worry, it's very simple. Keep reading. + +## Before you start contributing you should + +- Make sure you agree to contribute your code under [OpenVINO (Apache 2.0)](https://github.com/openvinotoolkit/openvino/blob/master/LICENSE) license. +- If you are submitting a new module, you should go into [openvino_contrib](https://github.com/openvinotoolkit/openvino_contrib) repository by default. +- If you are going to fix a bug, check that it still exists. This can be done by building the latest [releases/2020/3](https://github.com/openvinotoolkit/openvino/tree/releases/2020/3) branch (LTS release) or the latest master branch, and make sure that the error is still reproducible there. We do not fix bugs that only affect older non-LTS releases like 2020.2 for example (more details about [branching strategy](https://github.com/openvinotoolkit/openvino/wiki/Branches)) +- Make sure that nobody beat you to fixing or reporting the issue by doing a search on the [Github OpenVINO issues](https://github.com/openvinotoolkit/openvino/issues) page, and making sure that there isn't someone working on it. In the latter case you might provide support or suggestion in the issue or in the linked pull request. +- If you have a question about the software, then this is **NOT** the right place. You should open up a question at the [OpenVINO forum](https://community.intel.com/t5/Intel-Distribution-of-OpenVINO/bd-p/distribution-openvino-toolkit). In order to post a decent question from the start, feel free to read the official forum guidelines. 
+ +Before you open up anything on the OpenVINO GitHub page, be sure that you are at the right place with your problem. + +## "Fork & Pull Request model" for code contribution + +### [](https://github.com/openvinotoolkit/openvino/wiki/Contribute#the-instruction-in-brief)The instruction in brief + +- Register at GitHub. Create your fork of OpenVINO repository [https://github.com/openvinotoolkit/openvino](https://github.com/openvinotoolkit/openvino) (see [https://help.github.com/articles/fork-a-repo](https://help.github.com/articles/fork-a-repo) for details). +- Install Git. + - Set your user name and email address in a Git configuration according to GitHub account (see [https://git-scm.com/book/en/v2/Getting-Started-First-Time-Git-Setup](https://git-scm.com/book/en/v2/Getting-Started-First-Time-Git-Setup) for details). +- Choose a task for yourself. It could be a bugfix or some new code. +- Choose a base branch for your work. More details about branches and policies are here: [Branches](https://github.com/openvinotoolkit/openvino/wiki/Branches) +- Clone your fork to your computer. +- Create a new branch (with a meaningful name) from the base branch you chose. +- Modify / add the code following our [Coding Style Guide](https://github.com/openvinotoolkit/openvino/wiki/CodingStyleGuideLines) and [Documentation guidelines](https://github.com/openvinotoolkit/openvino/wiki/CodingStyleGuideLinesDocumentation). +- If you want to add a new sample, please look at this [Guide for contributing to C++/C/Python IE samples](https://github.com/openvinotoolkit/openvino/wiki/SampleContribute) +- Run testsuite locally: + - execute each test binary from the artifacts directory, e.g. `/bin/intel64/Release/ieFuncTests` +- If you contribute to the documentation and want to add a new guide: + - Create a new markdown file in an appropriate folder. + - **REQUIRED:** The document title must contain a document label in a form: `{#openvino_docs_}`. 
For example: `Deep Learning Network Intermediate Representation and Operation Sets in OpenVINO™ {#openvino_docs_MO_DG_IR_and_opsets}`. + - Add your file to the documentation structure. Open the documentation structure file [`docs/doxygen/ie_docs.xml`](https://github.com/openvinotoolkit/openvino/blob/master/docs/doxygen/ie_docs.xml) and add your file path to the appropriate section. +- When you are done, make sure that your branch is up to date with the latest state of the branch you want to contribute to (e.g. `git fetch upstream && git merge upstream/master`), push your branch to your GitHub fork; then create a pull request from your branch to the base branch (see [https://help.github.com/articles/using-pull-requests](https://help.github.com/articles/using-pull-requests) for details). + +## Making a good pull request + +Following these guidelines will increase the likelihood of your pull request being accepted: + +- Before pushing your PR to the repository, make sure that it builds perfectly fine on your local system. +- Add enough information, like a meaningful title, the reason why you made the commit and a link to the issue page if you opened one for this PR. +- Scope your PR to one issue. Before submitting, make sure the diff contains no unrelated changes. If you want to cover more than one issue, submit your changes for each as separate pull requests. +- If you have added new functionality, you should update/create the relevant documentation, as well as add tests for it to the testsuite. +- Try not to include "oops" commits - ones that just fix an error in the previous commit. If you have those, then before submitting [squash](https://github.com/openvinotoolkit/openvino/wiki/Contribute#https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History#Squashing-Commits) those fixes directly into the commits where they belong. 
+- Make sure to choose the right base branch and to follow the [Coding Style Guide](https://github.com/openvinotoolkit/openvino/wiki/CodingStyleGuideLines) for your code or [Documentation guidelines](https://github.com/openvinotoolkit/openvino/wiki/CodingStyleGuideLinesDocumentation) if you are changing documentation files. +- Make sure to add test for new functionality or test that reproduces fixed bug with related test data. Please do not add extra images or videos, if some of existing media files are suitable. + +## Testing and merging pull requests + +- Your pull request will be automatically tested by OpenVINO's precommit (testing statuses are automatically reported as "green" or "red" circles in precommit steps on PR's page). If any builders have failed, you should fix the issue. To rerun the automatic builds just push changes to your branch on GitHub. No need to close pull request and open a new one! +- Once all the builders are "green", one of OpenVINO developers will review your code. Reviewer could ask you to modify your pull request. Please provide timely response for reviewers (within weeks, not months), otherwise your submission could be postponed or even rejected. + +## PR review good practices + +- Originator is responsible for driving the review of changes and should ping reviewers periodically. +- Originator should close comments from the Reviewer when it is resolved. The Reviewer may re-open the comment if they do not agree with the resolution. +- Originator should request re-review from the Reviewer when all comments are resolved by pushing the button in the “Reviewers” section. +- If it is still WIP and you want to check CI test results early then use _Draft_ PR. +- Do **NOT** rewrite history (push -f) once you converted draft PR into regular one, add new commits instead. Looking at diffs makes review easier. +- Write meaningful description of commits resulting from review. _"Addressing review comments"_ is **NOT** a good description! 
Having a quick look at good descriptions can tell you much about what is going on in a PR without a need to go through all of resolved comments. + +## Merging PR + +As soon as the reviewer is fine with the pull request and Precommit likes your code and shows "green" status, the "Approved" review status is put, which signals OpenVINO maintainers that they can merge your pull request. + +© Copyright 2018-2022, OpenVINO team \ No newline at end of file diff --git a/README.md b/README.md index fc47dbe49d4..95431e98dce 100644 --- a/README.md +++ b/README.md @@ -16,12 +16,11 @@ source and public models in popular formats such as TensorFlow, ONNX, PaddlePadd * [OpenVINO™ Runtime] * [Model Optimizer] * [Post-Training Optimization Tool] - +* [Samples] ## License -Deep Learning Deployment Toolkit is licensed under [Apache License Version 2.0](LICENSE). -By contributing to the project, you agree to the license and copyright terms therein -and release your contribution under these terms. +OpenVINO™ Toolkit is licensed under [Apache License Version 2.0](LICENSE). +By contributing to the project, you agree to the license and copyright terms therein and release your contribution under these terms. 
## Resources * Docs: https://docs.openvino.ai/ @@ -46,5 +45,6 @@ Please report questions, issues and suggestions using: [OpenVINO™ Runtime]:https://docs.openvino.ai/latest/openvino_docs_OV_Runtime_User_Guide.html [Model Optimizer]:https://docs.openvino.ai/latest/openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide.html [Post-Training Optimization Tool]:https://docs.openvino.ai/latest/pot_README.html +[Samples]:https://github.com/openvinotoolkit/openvino/tree/master/samples [tag on StackOverflow]:https://stackoverflow.com/search?q=%23openvino diff --git a/cmake/developer_package/IEDevScriptsConfig.cmake b/cmake/developer_package/IEDevScriptsConfig.cmake index fa430b627e7..2ee352f8b4f 100644 --- a/cmake/developer_package/IEDevScriptsConfig.cmake +++ b/cmake/developer_package/IEDevScriptsConfig.cmake @@ -158,16 +158,22 @@ else () endif() add_definitions(-DIE_BUILD_POSTFIX=\"${IE_BUILD_POSTFIX}\") +macro(ov_set_if_not_defined var value) + if(NOT DEFINED ${var}) + set(${var} ${value}) + endif() +endmacro() + if(NOT UNIX) - set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) - set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) + ov_set_if_not_defined(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) + ov_set_if_not_defined(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) else() - set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/lib) - set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/lib) + ov_set_if_not_defined(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/lib) + ov_set_if_not_defined(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}/lib) endif() -set(CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) -set(CMAKE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) -set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) +ov_set_if_not_defined(CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) 
+ov_set_if_not_defined(CMAKE_PDB_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) +ov_set_if_not_defined(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${OUTPUT_ROOT}/${BIN_FOLDER}) if(APPLE) set(CMAKE_MACOSX_RPATH ON) @@ -207,6 +213,10 @@ endif() macro(ov_install_static_lib target comp) if(NOT BUILD_SHARED_LIBS) + get_target_property(target_type ${target} TYPE) + if(${target_type} STREQUAL "STATIC_LIBRARY") + set_target_properties(${target} PROPERTIES EXCLUDE_FROM_ALL FALSE) + endif() install(TARGETS ${target} EXPORT OpenVINOTargets ARCHIVE DESTINATION ${OV_CPACK_ARCHIVEDIR} COMPONENT ${comp} ${ARGN}) endif() diff --git a/cmake/developer_package/api_validator/api_validator.cmake b/cmake/developer_package/api_validator/api_validator.cmake index 6e073b53ad8..33a21696a9e 100644 --- a/cmake/developer_package/api_validator/api_validator.cmake +++ b/cmake/developer_package/api_validator/api_validator.cmake @@ -51,12 +51,6 @@ endfunction() set(VALIDATED_LIBRARIES "" CACHE INTERNAL "") function(_ov_add_api_validator_post_build_step) - if(NOT BUILD_SHARED_LIBS) - # since _ov_add_api_validator_post_build_step - # is currently run only on shared libraries, we have nothing to test - return() - endif() - set(UWP_API_VALIDATOR_APIS "${PROGRAMFILES}/Windows Kits/10/build/universalDDIs/x64/UniversalDDIs.xml") set(UWP_API_VALIDATOR_EXCLUSION "${UWP_SDK_PATH}/BinaryExclusionlist.xml") diff --git a/cmake/developer_package/cpplint/cpplint.py b/cmake/developer_package/cpplint/cpplint.py index efc12ba5c6b..2700bae29f8 100644 --- a/cmake/developer_package/cpplint/cpplint.py +++ b/cmake/developer_package/cpplint/cpplint.py @@ -3592,7 +3592,7 @@ def CheckOperatorSpacing(filename, clean_lines, linenum, error): elif not Match(r'#.*include', line): # Look for < that is not surrounded by spaces. This is only # triggered if both sides are missing spaces, even though - # technically should should flag if at least one side is missing a + # technically should flag if at least one side is missing a # space. 
This is done to avoid some false positives with shifts. match = Match(r'^(.*[^\s<])<[^\s=<,]', line) if match: diff --git a/cmake/developer_package/download/download_and_extract.cmake b/cmake/developer_package/download/download_and_extract.cmake index a9b71ace064..01662589e2c 100644 --- a/cmake/developer_package/download/download_and_extract.cmake +++ b/cmake/developer_package/download/download_and_extract.cmake @@ -146,8 +146,6 @@ function (DownloadOrExtractInternal URL archive_path unpacked_path folder fattal endfunction(DownloadOrExtractInternal) -file(REMOVE ${CMAKE_BINARY_DIR}/dependencies_64.txt) - function (CheckOrDownloadAndExtract component RELATIVE_URL archive_name unpacked_path result_path folder fattal resultExt use_alternatives sha256 files_to_extract) set (archive_path ${TEMP}/download/${archive_name}) set (status "ON") @@ -164,7 +162,6 @@ function (CheckOrDownloadAndExtract component RELATIVE_URL archive_name unpacked if (${use_alternatives}) set(DEP_INFO "${component}=${URL}") debug_message (STATUS "DEPENDENCY_URL: ${DEP_INFO}") - file(APPEND ${CMAKE_BINARY_DIR}/dependencies_64.txt "${DEP_INFO}\n") endif() debug_message ("checking that unpacked directory exist: ${unpacked_path}") diff --git a/cmake/developer_package/frontends/frontends.cmake b/cmake/developer_package/frontends/frontends.cmake index 0cc09c639c5..ce42c940d47 100644 --- a/cmake/developer_package/frontends/frontends.cmake +++ b/cmake/developer_package/frontends/frontends.cmake @@ -107,6 +107,17 @@ macro(ov_add_frontend) set(FRONTEND_NAMES "${FRONTEND_NAMES}" CACHE INTERNAL "" FORCE) file(GLOB_RECURSE LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/*.cpp) + if (WIN32) + # Remove linux specific files + file(GLOB_RECURSE LIN_FILES ${CMAKE_CURRENT_SOURCE_DIR}/src/os/lin/*.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/src/os/lin/*.hpp) + list(REMOVE_ITEM LIBRARY_SRC "${LIN_FILES}") + else() + # Remove windows specific files + file(GLOB_RECURSE WIN_FILES ${CMAKE_CURRENT_SOURCE_DIR}/src/os/win/*.cpp + 
${CMAKE_CURRENT_SOURCE_DIR}/src/os/win/*.hpp) + list(REMOVE_ITEM LIBRARY_SRC "${WIN_FILES}") + endif() file(GLOB_RECURSE LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/src/*.hpp) file(GLOB_RECURSE LIBRARY_PUBLIC_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/include/*.hpp) diff --git a/cmake/developer_package/ncc_naming_style/ncc_naming_style.cmake b/cmake/developer_package/ncc_naming_style/ncc_naming_style.cmake index 53b14c02c89..8efdf859a42 100644 --- a/cmake/developer_package/ncc_naming_style/ncc_naming_style.cmake +++ b/cmake/developer_package/ncc_naming_style/ncc_naming_style.cmake @@ -107,8 +107,10 @@ function(ov_ncc_naming_style) list(APPEND NCC_STYLE_ADDITIONAL_INCLUDE_DIRECTORIES "${NCC_STYLE_SOURCE_DIRECTORY}") + # without it sources with same name from different directories will map to same .ncc_style target + file(RELATIVE_PATH source_dir_rel ${CMAKE_SOURCE_DIR} ${NCC_STYLE_SOURCE_DIRECTORY}) foreach(source IN LISTS sources) - set(output_file "${ncc_style_bin_dir}/${source}.ncc_style") + set(output_file "${ncc_style_bin_dir}/${source_dir_rel}/${source}.ncc_style") set(full_source_path "${NCC_STYLE_SOURCE_DIRECTORY}/${source}") add_custom_command( diff --git a/cmake/developer_package/plugins/plugins.cmake b/cmake/developer_package/plugins/plugins.cmake index e41cc2b6d7e..8741861d132 100644 --- a/cmake/developer_package/plugins/plugins.cmake +++ b/cmake/developer_package/plugins/plugins.cmake @@ -230,6 +230,10 @@ macro(ie_register_plugins_dynamic) endif() list(GET name 0 device_name) list(GET name 1 name) + # Skip plugins which don't exist in the possible plugins list + if (IE_REGISTER_POSSIBLE_PLUGINS AND NOT name IN_LIST IE_REGISTER_POSSIBLE_PLUGINS) + continue() + endif() # create plugin file set(config_file_name "${CMAKE_BINARY_DIR}/plugins/${device_name}.xml") @@ -358,7 +362,7 @@ function(ie_generate_plugins_hpp) # for some reason dependency on source files does not work # so, we have to use explicit target and make it dependency for inference_engine 
add_custom_target(_ie_plugins_hpp DEPENDS ${ie_plugins_hpp}) - add_dependencies(inference_engine _ie_plugins_hpp) + add_dependencies(inference_engine_obj _ie_plugins_hpp) # add dependency for object files get_target_property(sources inference_engine_obj SOURCES) diff --git a/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in b/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in index fb76232cbcd..a74bbe78dbe 100644 --- a/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in +++ b/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in @@ -45,7 +45,7 @@ find_dependency(InferenceEngine NO_DEFAULT_PATH) find_dependency(ngraph - PATHS "${CMAKE_CURRENT_LIST_DIR}/src/core" + PATHS "${CMAKE_CURRENT_LIST_DIR}" NO_CMAKE_FIND_ROOT_PATH NO_DEFAULT_PATH) diff --git a/cmake/test_model_zoo.cmake b/cmake/test_model_zoo.cmake index 176e1b55220..18442560350 100644 --- a/cmake/test_model_zoo.cmake +++ b/cmake/test_model_zoo.cmake @@ -86,11 +86,6 @@ ov_model_convert("${OpenVINO_SOURCE_DIR}/${rel_path}" "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/test_model_zoo/onnx_import" ie_onnx_import_out_files) -set(rel_path "docs/onnx_custom_op") -ov_model_convert("${OpenVINO_SOURCE_DIR}/${rel_path}" - "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/test_model_zoo/docs/models" - docs_onnx_out_files) - if(ENABLE_TESTS) if(ENABLE_OV_ONNX_FRONTEND AND ENABLE_REQUIREMENTS_INSTALL) find_package(PythonInterp 3 REQUIRED) diff --git a/cmake/toolchains/mt.runtime.win32.toolchain.cmake b/cmake/toolchains/mt.runtime.win32.toolchain.cmake index 0da0ccf1e6e..62619cd5683 100644 --- a/cmake/toolchains/mt.runtime.win32.toolchain.cmake +++ b/cmake/toolchains/mt.runtime.win32.toolchain.cmake @@ -25,7 +25,7 @@ endif() if(use_static_runtime) foreach(lang C CXX) foreach(build_type "" "_DEBUG" "_MINSIZEREL" "_RELEASE" "_RELWITHDEBINFO") - set(flag_var "CMAKE_${lang}_FLAGS${build_type}") + set(flag_var "CMAKE_${lang}_FLAGS${build_type}_INIT") string(REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") 
endforeach() endforeach() diff --git a/cmake/toolchains/oecore.arm64.toolchain.cmake b/cmake/toolchains/oecore.arm64.toolchain.cmake deleted file mode 100644 index 0d0b63398b7..00000000000 --- a/cmake/toolchains/oecore.arm64.toolchain.cmake +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (C) 2018-2022 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -# - -if(DEFINED OECORE_BASE_DIR) - # OECORE_BASE_DIR was passed via CMake command line, nothing to do -elseif(DEFINED ENV{OECORE_BASE_DIR}) - # User sets OECORE_BASE_DIR environment variable - set(OECORE_BASE_DIR $ENV{OECORE_BASE_DIR}) -elseif(DEFINED ENV{OECORE_NATIVE_SYSROOT}) - # OECORE_NATIVE_SYSROOT is a default environment variable for the OECore toolchain - set(OECORE_BASE_DIR "$ENV{OECORE_NATIVE_SYSROOT}/../..") -else() - # Use default value - set(OECORE_BASE_DIR "/usr/local/oecore-x86_64") -endif() - -set(OECORE_TARGET_NAME "aarch64-ese-linux") -set(OECORE_TARGET_SYSROOT "${OECORE_BASE_DIR}/sysroots/${OECORE_TARGET_NAME}") -set(OECORE_HOST_SYSROOT "${OECORE_BASE_DIR}/sysroots/x86_64-esesdk-linux") -set(OECORE_HOST_COMPILER_BIN_DIR "${OECORE_HOST_SYSROOT}/usr/bin/${OECORE_TARGET_NAME}") - -set(CMAKE_SYSTEM_NAME "Linux") -set(CMAKE_SYSTEM_PROCESSOR "aarch64") - -set(CMAKE_SYSROOT "${OECORE_TARGET_SYSROOT}") - -set(CMAKE_C_COMPILER "${OECORE_HOST_COMPILER_BIN_DIR}/aarch64-ese-linux-gcc") -set(CMAKE_CXX_COMPILER "${OECORE_HOST_COMPILER_BIN_DIR}/aarch64-ese-linux-g++") - -set(CMAKE_C_FLAGS_INIT "-mcpu=cortex-a53 -mtune=cortex-a53 --sysroot=${OECORE_TARGET_SYSROOT}") -set(CMAKE_CXX_FLAGS_INIT "-mcpu=cortex-a53 -mtune=cortex-a53 --sysroot=${OECORE_TARGET_SYSROOT}") - -set(CMAKE_EXE_LINKER_FLAGS_INIT "-Wl,-O1 -Wl,--hash-style=gnu -Wl,--as-needed --sysroot=${OECORE_TARGET_SYSROOT}") -set(CMAKE_SHARED_LINKER_FLAGS_INIT "-Wl,-O1 -Wl,--hash-style=gnu -Wl,--as-needed --sysroot=${OECORE_TARGET_SYSROOT}") -set(CMAKE_MODULE_LINKER_FLAGS_INIT "-Wl,-O1 -Wl,--hash-style=gnu -Wl,--as-needed 
--sysroot=${OECORE_TARGET_SYSROOT}") - -set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) -set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/cmake/toolchains/onecoreuap.toolchain.cmake b/cmake/toolchains/onecoreuap.toolchain.cmake index 0accc4ed7da..c97bca379ed 100644 --- a/cmake/toolchains/onecoreuap.toolchain.cmake +++ b/cmake/toolchains/onecoreuap.toolchain.cmake @@ -35,14 +35,14 @@ if(_onecoreuap_arch STREQUAL "x64") # Forcefull make VS search for C++ libraries in these folders prior to other c++ standard libraries localizations. add_link_options("/LIBPATH:\"\$\(VC_LibraryPath_VC_x64_OneCore\)\"") - set(CMAKE_C_STANDARD_LIBRARIES "\$\(UCRTContentRoot\)lib/\$\(TargetUniversalCRTVersion\)/um/\$\(Platform\)/OneCoreUap.lib" CACHE STRING "" FORCE) - set(CMAKE_CXX_STANDARD_LIBRARIES "\$\(UCRTContentRoot\)lib/\$\(TargetUniversalCRTVersion\)/um/\$\(Platform\)/OneCoreUap.lib" CACHE STRING "" FORCE) + set(CMAKE_C_STANDARD_LIBRARIES_INIT "\$\(UCRTContentRoot\)lib/\$\(TargetUniversalCRTVersion\)/um/\$\(Platform\)/OneCoreUap.lib" CACHE STRING "" FORCE) + set(CMAKE_CXX_STANDARD_LIBRARIES_INIT "\$\(UCRTContentRoot\)lib/\$\(TargetUniversalCRTVersion\)/um/\$\(Platform\)/OneCoreUap.lib" CACHE STRING "" FORCE) elseif(_onecoreuap_arch STREQUAL "X86") add_link_options("/LIBPATH:\"\$\(VCInstallDir\)lib/onecore\"") add_link_options("/LIBPATH:\"\$\(VC_LibraryPath_VC_x86_OneCore\)\"") - set(CMAKE_C_STANDARD_LIBRARIES "\$\(UCRTContentRoot\)lib/\$\(TargetUniversalCRTVersion\)/um/x86/OneCoreUap.lib" CACHE STRING "" FORCE) - set(CMAKE_CXX_STANDARD_LIBRARIES "\$\(UCRTContentRoot\)lib/\$\(TargetUniversalCRTVersion\)/um/x86/OneCoreUap.lib" CACHE STRING "" FORCE) + set(CMAKE_C_STANDARD_LIBRARIES_INIT "\$\(UCRTContentRoot\)lib/\$\(TargetUniversalCRTVersion\)/um/x86/OneCoreUap.lib" CACHE STRING "" FORCE) + set(CMAKE_CXX_STANDARD_LIBRARIES_INIT 
"\$\(UCRTContentRoot\)lib/\$\(TargetUniversalCRTVersion\)/um/x86/OneCoreUap.lib" CACHE STRING "" FORCE) else() message(FATAL_ERROR "Unsupported architecture ${_onecoreuap_arch}. Only X86 or X86_64 are supported") endif() @@ -52,8 +52,8 @@ unset(_onecoreuap_arch) # compile flags set(includes "/I\"\$\(UniversalCRT_IncludePath\)\"") -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${includes}") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${includes}") +set(CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS_INIT} ${includes}") +set(CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS_INIT} ${includes}") unset(includes) # linker flags @@ -62,9 +62,9 @@ foreach(lib kernel32 user32 advapi32 ole32 mscoree combase) set(linker_flags "/NODEFAULTLIB:${lib}.lib ${linker_flags}") endforeach() -set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${linker_flags}") -set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${linker_flags}") -set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${linker_flags}") +set(CMAKE_SHARED_LINKER_FLAGS_INIT "${CMAKE_SHARED_LINKER_FLAGS_INIT} ${linker_flags}") +set(CMAKE_MODULE_LINKER_FLAGS_INIT "${CMAKE_MODULE_LINKER_FLAGS_INIT} ${linker_flags}") +set(CMAKE_EXE_LINKER_FLAGS_INIT "${CMAKE_EXE_LINKER_FLAGS_INIT} ${linker_flags}") unset(linker_flags) # diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt index 3496226c845..b7e2a7971dd 100644 --- a/docs/CMakeLists.txt +++ b/docs/CMakeLists.txt @@ -7,8 +7,6 @@ if(NOT ENABLE_DOCKER) ie_add_compiler_flags(-Wall) endif() - add_subdirectory(snippets) - # Detect OpenVINO find_package(OpenVINO QUIET PATHS "${CMAKE_BINARY_DIR}" @@ -17,9 +15,8 @@ if(NOT ENABLE_DOCKER) set(OpenVINO_DIR ${CMAKE_BINARY_DIR}) endif() - if(ENABLE_OV_ONNX_FRONTEND) - add_subdirectory(onnx_custom_op) - endif() + add_subdirectory(snippets) + add_subdirectory(template_extension) set(all_docs_targets @@ -55,6 +52,7 @@ endif() set(LINKCHECKER_PY "" CACHE FILEPATH "Path to linkchecker.py for documentation check dir.") set(ENABLE_OPENVINO_NOTEBOOKS OFF CACHE BOOL 
"Build with openvino notebooks") set(OMZ_DOCS_DIR "" CACHE PATH "Path to open_model_zoo documentation dir.") +set(OTE_DOCS_DIR "" CACHE PATH "Path to training_extensions documentation dir.") set(WORKBENCH_DOCS_DIR "" CACHE PATH "Path to workbench documentation dir.") set(OVMS_DOCS_DIR "" CACHE PATH "Path to model server documentation dir.") set(GRAPH_CSV_DIR "" CACHE PATH "Path to the folder containing csv data for rendering graphs.") @@ -168,6 +166,15 @@ function(build_docs) --output_dir=${DOCS_BUILD_DIR}/workbench) endif() + # ote doc files + if(EXISTS "${OTE_DOCS_DIR}") + get_filename_component(WORKBENCH_DOCS_DIR "${OTE_DOCS_DIR}" ABSOLUTE) + + list(APPEND commands COMMAND ${PYTHON_EXECUTABLE} ${DOXY_MD_FILTER} + --input_dir=${OTE_DOCS_DIR} + --output_dir=${DOCS_BUILD_DIR}/ote) + endif() + # ovms doc files if(EXISTS "${OVMS_DOCS_DIR}") get_filename_component(OVMS_DOCS_DIR "${OVMS_DOCS_DIR}" ABSOLUTE) diff --git a/docs/Doxyfile.config b/docs/Doxyfile.config index 1538db98e38..adffa442688 100644 --- a/docs/Doxyfile.config +++ b/docs/Doxyfile.config @@ -719,7 +719,7 @@ SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via -# popen()) the command command input-file, where command is the value of the +# popen()) the command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. 
@@ -843,16 +843,6 @@ INPUT = "@MARKDOWN_INPUT@" \ "@OpenVINO_SOURCE_DIR@/src/common/transformations/include/" \ "@OpenVINO_SOURCE_DIR@/src/common/util/include/" \ "@OpenVINO_SOURCE_DIR@/src/core/include/" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/descriptor" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/op/" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/op/util" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/opsets/" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/pass/" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/pattern/" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/pattern/op/" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/runtime/" \ - "@OpenVINO_SOURCE_DIR@/src/core/include/ngraph/type/" \ "@OpenVINO_SOURCE_DIR@/src/core/include/openvino/" \ "@OpenVINO_SOURCE_DIR@/src/core/include/openvino/core/" \ "@OpenVINO_SOURCE_DIR@/src/core/include/openvino/core/descriptor/" \ @@ -917,7 +907,9 @@ RECURSIVE = YES # Note that relative paths are relative to the directory from which doxygen is # run. 
-EXCLUDE = +EXCLUDE = "@OpenVINO_SOURCE_DIR@/thirdparty" \ + "@OpenVINO_SOURCE_DIR@/temp" \ + "@OpenVINO_SOURCE_DIR@/bin" # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded @@ -936,7 +928,6 @@ EXCLUDE_SYMLINKS = NO EXCLUDE_PATTERNS = */temp/* \ */bin/* \ */tests/* \ - */openvx/* \ */thirdparty/* \ "@DOXYREST_OUT@" \ "@XML_OUTPUT@" \ @@ -1045,7 +1036,6 @@ EXCLUDE_SYMBOLS = InferenceEngine::details \ EXAMPLE_PATH = "@OpenVINO_SOURCE_DIR@" \ "@OpenVINO_SOURCE_DIR@/docs/HOWTO/" \ "@OpenVINO_SOURCE_DIR@/docs/" \ - "@OpenVINO_SOURCE_DIR@/docs/onnx_custom_op/" \ "@OpenVINO_SOURCE_DIR@/docs/template_extension/" \ "@OpenVINO_SOURCE_DIR@/docs/template_extension/old/" \ "@OpenVINO_SOURCE_DIR@/docs/template_extension/new/" \ diff --git a/docs/OV_Runtime_UG/Extensibility_DG/GPU_Kernel.md b/docs/Extensibility_UG/GPU_Extensibility.md similarity index 81% rename from docs/OV_Runtime_UG/Extensibility_DG/GPU_Kernel.md rename to docs/Extensibility_UG/GPU_Extensibility.md index 3305c00085b..136c0a1a698 100644 --- a/docs/OV_Runtime_UG/Extensibility_DG/GPU_Kernel.md +++ b/docs/Extensibility_UG/GPU_Extensibility.md @@ -1,17 +1,17 @@ -# How to Implement Custom GPU Operations {#openvino_docs_IE_DG_Extensibility_DG_GPU_Kernel} +# How to Implement Custom GPU Operations {#openvino_docs_Extensibility_UG_GPU} -To enable operations not supported by OpenVINO™ out of the box, you need a custom extension for Model Optimizer, a custom nGraph operation set, and a custom kernel for the device you will target. This page describes custom kernel support for the GPU device. +To enable operations not supported by OpenVINO out of the box, you may need an extension for OpenVINO operation set, and a custom kernel for the device you will target. This page describes custom kernel support for the GPU device. The GPU codepath abstracts many details about OpenCL\*. 
You need to provide the kernel code in OpenCL C and an XML configuration file that connects the kernel and its parameters to the parameters of the operation. There are two options for using the custom operation configuration file: -* Include a section with your kernels into the global automatically-loaded `cldnn_global_custom_kernels/cldnn_global_custom_kernels.xml` file, which is hosted in the `/runtime/bin` folder -* Call the `InferenceEngine::Core::SetConfig()` method from your application with the `InferenceEngine::PluginConfigParams::KEY_CONFIG_FILE` key and the configuration file name as a value before loading the network that uses custom operations to the plugin: +* Include a section with your kernels into the automatically-loaded `/cldnn_global_custom_kernels/cldnn_global_custom_kernels.xml` file. +* Call the `ov::Core::set_property()` method from your application with the `"CONFIG_FILE"` key and the configuration file name as a value before loading the network that uses custom operations to the plugin: -@snippet snippets/GPU_Kernel.cpp part0 +@snippet snippets/gpu/custom_kernels_api.cpp part0 -All Inference Engine samples, except the trivial `hello_classification`, and most Open Model Zoo demos +All OpenVINO samples, except the trivial `hello_classification`, and most Open Model Zoo demos feature a dedicated command-line option `-c` to load custom kernels. For example, to load custom operations for the classification sample, run the command below: ```sh $ ./classification_sample -m /bvlc_alexnet_fp16.xml -i ./validation_set/daily/227x227/apron.bmp -d GPU @@ -47,8 +47,7 @@ Notation | Description ### Kernel Node and Sub-Node Structure -`Kernel` node contains all kernel source code configuration. No kernel -node structure exists. +`Kernel` node contains all kernel source code configuration. **Sub-nodes**: `Source` (1+), `Define` (0+) @@ -134,7 +133,7 @@ queuing an OpenCL program for execution. 
## Example Configuration File -The following code sample provides an example configuration file in XML +The following code sample provides an example configuration file in XML format. For information on the configuration file structure, see [Configuration File Format](#config-file-format). ```xml @@ -155,8 +154,7 @@ format. For information on the configuration file structure, see ## Built-In Definitions for Custom Layers The following table includes definitions that are attached before -user sources, where `` is the actual input and output, for -example, `INPUT0` or `OUTPUT0`. +user sources. For an example, see [Example Kernel](#example-kernel). @@ -170,19 +168,20 @@ For an example, see [Example Kernel](#example-kernel). | `_DIMS`| An array of the tensor dimension sizes. Always ordered as `BFYX` | | `_DIMS_SIZE`| The size of the `_DIMS` array.| | `_TYPE`| The datatype of the tensor: `float`, `half`, or `char`| -| `_FORMAT_` | The format of the tensor, BFYX, BYXF, YXFB , FYXB, or ANY. The format is concatenated to the defined name. You can use the tensor format to define codepaths in your code with `#‍ifdef/#‍endif`. | +| `_FORMAT_` | The format of the tensor, BFYX, BYXF, YXFB , FYXB, or ANY. The format is concatenated to the defined name. You can use the tensor format to define codepaths in your code with `#‍ifdef/#‍endif`. | | `_LOWER_PADDING` | An array of padding elements used for the tensor dimensions before they start. Always ordered as BFYX.| -| `_ LOWER_PADDING_SIZE` | The size of the `_LOWER_PADDING` array | +| `_LOWER_PADDING_SIZE` | The size of the `_LOWER_PADDING` array | | `_UPPER_PADDING` | An array of padding elements used for the tensor dimensions after they end. Always ordered as BFYX. | | `_UPPER_PADDING_SIZE` | The size of the `_UPPER_PADDING` array | -| `_PITCHES` | The number of elements between adjacent elements in each dimension. Always ordered as BFYX.| +| `_PITCHES` | The offset (in elements) between adjacent elements in each dimension. 
Always ordered as BFYX.| | `_PITCHES_SIZE`| The size of the `_PITCHES` array | | `_OFFSET`| The number of elements from the start of the tensor to the first valid element, bypassing the lower padding. | + All `` values are automatically defined for every tensor bound to this operation, such as `INPUT0`, `INPUT1`, and `OUTPUT0`, as shown in the following example: -```sh +```c #define INPUT0_DIMS_SIZE 4 #define INPUT0_DIMS (int []){ 1,96,55,55, } ``` @@ -197,28 +196,25 @@ __kernel void example_relu_kernel( { const uint idx = get_global_id(0); const uint idy = get_global_id(1); - const uint idbf = get_global_id(2);//batches*features, as OpenCL supports 3D nd-ranges only - const uint feature = idbf%OUTPUT0_DIMS[1]; - const uint batch = idbf/OUTPUT0_DIMS[1]; + const uint idbf = get_global_id(2); // batches*features, as OpenCL supports 3D nd-ranges only + const uint feature = idbf % OUTPUT0_DIMS[1]; + const uint batch = idbf / OUTPUT0_DIMS[1]; //notice that pitches are in elements, not in bytes! const uint in_id = batch*INPUT0_PITCHES[0] + feature*INPUT0_PITCHES[1] + idy*INPUT0_PITCHES[2] + idx*INPUT0_PITCHES[3] + INPUT0_OFFSET; const uint out_id = batch*OUTPUT0_PITCHES[0] + feature*OUTPUT0_PITCHES[1] + idy*OUTPUT0_PITCHES[2] + idx*OUTPUT0_PITCHES[3] + OUTPUT0_OFFSET; INPUT0_TYPE value = input0[in_id]; - //neg_slope (which is non-zero for leaky ReLU) is put automatically as #define, refer to the config xml + // neg_slope (which is non-zero for leaky ReLU) is put automatically as #define, refer to the config xml output[out_id] = value < 0 ? value * neg_slope : value; } ``` + > **NOTE**: As described in the previous section, all items like > `INPUT0_TYPE` are actually defined as OpenCL (pre-)compiler inputs by -> the Inference Engine for efficiency reasons. See [Debugging +> the OpenVINO for efficiency reasons. See [Debugging > Tips](#debugging-tips) for information on debugging the results. 
-> **NOTE**: Several GPU-targeted kernels are also added to the binaries upon compilation of samples -> so that the sample application can easy load them. -> Refer to the `cldnn_global_custom_kernels` folder in the GPU plugin installation directory. - ## Debugging Tips * **Using `printf` in the OpenCL™ Kernels**. diff --git a/docs/Extensibility_UG/Intro.md b/docs/Extensibility_UG/Intro.md new file mode 100644 index 00000000000..f0df72daf47 --- /dev/null +++ b/docs/Extensibility_UG/Intro.md @@ -0,0 +1,120 @@ +# OpenVINO Extensibility Mechanism {#openvino_docs_Extensibility_UG_Intro} + +@sphinxdirective + +.. toctree:: + :maxdepth: 1 + :hidden: + + openvino_docs_Extensibility_UG_add_openvino_ops + openvino_docs_Extensibility_UG_GPU + openvino_docs_MO_DG_prepare_model_customize_model_optimizer_Customize_Model_Optimizer + +@endsphinxdirective + +The Intel® Distribution of OpenVINO™ toolkit supports neural network models trained with various frameworks, including +TensorFlow, PyTorch, ONNX, PaddlePaddle, MXNet, Caffe, and Kaldi. The list of supported operations (layers) is different for +each of the supported frameworks. To see the operations supported by your framework, refer to +[Supported Framework Operations](../MO_DG/prepare_model/Supported_Frameworks_Layers.md). + +Custom operations, that is those not included in the list, are not recognized by OpenVINO™ out-of-the-box. Therefore, creating Intermediate Representation (IR) for a model using them requires additional steps. This guide illustrates the workflow for running inference on topologies featuring custom operations, allowing you to plug in your own implementation for existing or completely new operations. + +If your model contains operations not normally supported by OpenVINO™, the OpenVINO™ Extensibility API lets you add support for those custom operations and use one implementation for Model Optimizer and OpenVINO™ Runtime. + +There are two steps to support inference of a model with custom operation(s): +1. 
Add support for a [custom operation in the Model Optimizer](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) so +the Model Optimizer can generate the IR with the operation. +2. Create a custom operation in it as described in the [Custom Operation](add_openvino_ops.md). + +## OpenVINO™ Extensions + +OpenVINO™ provides extensions for: + + * [Custom OpenVINO™ Operation](add_openvino_ops.md): + - Enables the creation of unsupported operations + - Enables the use of `ov::Core::read_model` to read models with unsupported operations + - Provides a shape inference mechanism for custom operations + - Provides an evaluate method that allows you to support the operation on CPU or perform constant folding + * [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md): + - Enables support of new operations to generate IR + - Enables support of custom transformations to replace sub-graphs for performance optimization + +> **NOTE**: This documentation is written based on the [Template extension](https://github.com/openvinotoolkit/openvino/tree/master/docs/template_extension/new), which demonstrates extension development details. You can review the complete code, which is fully compilable and up-to-date, to see how it works. + +## Load extensions to OpenVINO™ Runtime + +To load the extensions to the `ov::Core` object, use the `ov::Core::add_extension` method, this method allows to load library with extensions or extensions from the code. + +### Load extensions to core + +Extensions can be loaded from code with `ov::Core::add_extension` method: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_extensions.cpp + :language: cpp + :fragment: add_extension + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_extensions.py + :language: python + :fragment: add_extension + +@endsphinxdirective + +### Create library with extensions + +You need to create extension library in following cases: + - Load extensions to Model Optimizer + - Load extensions to Python application + +If you want to create an extension library, for example in order to load these extensions to the Model Optimizer, you need to do next steps: +Create an entry point for extension library. OpenVINO™ provides an `OPENVINO_CREATE_EXTENSIONS()` macro, which allows to define an entry point to a library with OpenVINO™ Extensions. +This macro should have a vector of all OpenVINO™ Extensions as an argument. + +Based on that, the declaration of an extension class can look as follows: + +@snippet template_extension/new/ov_extension.cpp ov_extension:entry_point + +To configure the build of your extension library, use the following CMake script: + +@snippet template_extension/new/CMakeLists.txt cmake:extension + +This CMake script finds the OpenVINO™ using the `find_package` CMake command. + +To build the extension library, run the commands below: + +```sh +$ cd docs/template_extension/new +$ mkdir build +$ cd build +$ cmake -DOpenVINO_DIR= ../ +$ cmake --build . +``` + +After the build you can use path to your extension library to load your extensions to OpenVINO™ Runtime: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_extensions.cpp + :language: cpp + :fragment: add_extension_lib + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_extensions.py + :language: python + :fragment: add_extension_lib + +@endsphinxdirective + +## See Also + +* [OpenVINO Transformations](./ov_transformations.md) +* [Using Inference Engine Samples](../OV_Runtime_UG/Samples_Overview.md) +* [Hello Shape Infer SSD sample](../../samples/cpp/hello_reshape_ssd/README.md) diff --git a/docs/Extensibility_UG/add_openvino_ops.md b/docs/Extensibility_UG/add_openvino_ops.md new file mode 100644 index 00000000000..7c5ed06f1fd --- /dev/null +++ b/docs/Extensibility_UG/add_openvino_ops.md @@ -0,0 +1,62 @@ +# Custom OpenVINO™ Operations {#openvino_docs_Extensibility_UG_add_openvino_ops} + +OpenVINO™ Extension API allows you to register custom operations to support models with operations which OpenVINO™ does not support out-of-the-box. + +## Operation Class + +To add your custom operation, create a new class that extends `ov::Op`, which is in turn derived from `ov::Node`, the base class for all graph operations in OpenVINO™. To add `ov::Op` please include next file: + +@snippet template_extension/new/identity.hpp op:common_include + +Follow the steps below to add a custom operation: + +1. Add the `OPENVINO_OP` macro which defines a `NodeTypeInfo` object that identifies the type of the operation to the graph users and helps with dynamic type resolution. The type info of an operation currently consists of a string operation identifier and a string for operation version. + +2. Implement default constructor and constructors that optionally take the operation inputs and attributes as parameters. + +3. Override the shape inference method `validate_and_infer_types`. This method is called multiple times during graph manipulations to determine the shapes and element types of the operations outputs. To access the input shapes and input element types, use the `get_input_partial_shape()` and `get_input_element_type()` methods of `ov::Node`. 
Set the inferred shape and element type of the output using `set_output_type`. + +4. Override the `clone_with_new_inputs` method, which enables graph manipulation routines to create copies of this operation and connect it to different nodes during optimization. + +5. Override the `visit_attributes` method, which enables serialization and deserialization of operation attributes. An `AttributeVisitor` is passed to the method, and the implementation is expected to walk over all the attributes in the op using the type-aware `on_attribute` helper. Helpers are already implemented for standard C++ types like `int64_t`, `float`, `bool`, `vector`, and for existing OpenVINO defined types. + +6. Override `evaluate`, which is an optional method that enables fallback of some devices to this implementation and the application of constant folding if there is a custom operation on the constant branch. If your operation contains `evaluate` method you also need to override the `has_evaluate` method, this method allow to get information about availability of `evaluate` method for the operation. + +7. Add the `OPENVINO_FRAMEWORK_MAP` macro if you want to map custom operation to framework operation with the same name. It is an optional macro which can be used for one to one mapping. 
In order to use this macro please include frontend specific headers: + @snippet template_extension/new/identity.hpp op:frontend_include + +Based on that, declaration of an operation class can look as follows: + +@snippet template_extension/new/identity.hpp op:header + +### Operation Constructors + +OpenVINO™ operation contains two constructors: +* Default constructor, which enables you to create an operation without attributes +* Constructor that creates and validates an operation with specified inputs and attributes + +@snippet template_extension/new/identity.cpp op:ctor + +### `validate_and_infer_types()` + +`ov::Node::validate_and_infer_types` method validates operation attributes and calculates output shapes using attributes of the operation. + +@snippet template_extension/new/identity.cpp op:validate + +### `clone_with_new_inputs()` + +`ov::Node::clone_with_new_inputs` method creates a copy of the operation with new inputs. + +@snippet template_extension/new/identity.cpp op:copy + +### `visit_attributes()` + +`ov::Node::visit_attributes` method enables you to visit all operation attributes. + +@snippet template_extension/new/identity.cpp op:visit_attributes + +### `evaluate()` and `has_evaluate()` + +`ov::Node::evaluate` method enables you to apply constant folding to an operation. + +@snippet template_extension/new/identity.cpp op:evaluate diff --git a/docs/Extensibility_UG/graph_rewrite_pass.md b/docs/Extensibility_UG/graph_rewrite_pass.md new file mode 100644 index 00000000000..11f178be4e3 --- /dev/null +++ b/docs/Extensibility_UG/graph_rewrite_pass.md @@ -0,0 +1,28 @@ +# OpenVINO Graph Rewrite Pass {#openvino_docs_Extensibility_UG_graph_rewrite_pass} + +`ov::pass::GraphRewrite` serves for running multiple matcher passes on `ov::Model` in a single graph traversal. 
+Example: + +@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:graph_rewrite + +In addition, GraphRewrite handles nodes that were registered by MatcherPasses during their execution. These nodes will be added to the beginning of the sequence with nodes for pattern matching. + +> **NOTE**: when using `ov::pass::Manager`, a temporary GraphRewrite is used to execute a single MatcherPass. + +GraphRewrite has two algorithms for MatcherPasses execution. The first algorithm is straightforward. It applies each MatcherPass in registration order to the current node. + +![graph_rewrite_execution] + +But it is not really efficient when you have a lot of registered passes. So first of all GraphRewrite checks that all MatcherPass patterns have a type-based root node (it means that the type of this node is not hidden in a predicate). +Then it creates a map from the registered MatcherPasses. That helps to avoid the additional cost of applying each MatcherPass for each node. + +![graph_rewrite_efficient_search] + +> **NOTE**: GraphRewrite execution algorithm cannot be set manually and depends only on root nodes registered inside MatcherPasses.
+ +## See Also + +* [OpenVINO™ Transformations](./ov_transformations.md) + +[graph_rewrite_execution]: ./img/graph_rewrite_execution.png +[graph_rewrite_efficient_search]: ./img/graph_rewrite_efficient_search.png diff --git a/docs/OV_Runtime_UG/img/graph_rewrite_efficient_search.png b/docs/Extensibility_UG/img/graph_rewrite_efficient_search.png similarity index 100% rename from docs/OV_Runtime_UG/img/graph_rewrite_efficient_search.png rename to docs/Extensibility_UG/img/graph_rewrite_efficient_search.png diff --git a/docs/OV_Runtime_UG/img/graph_rewrite_execution.png b/docs/Extensibility_UG/img/graph_rewrite_execution.png similarity index 100% rename from docs/OV_Runtime_UG/img/graph_rewrite_execution.png rename to docs/Extensibility_UG/img/graph_rewrite_execution.png diff --git a/docs/OV_Runtime_UG/img/ngraph_insert_node.png b/docs/Extensibility_UG/img/ngraph_insert_node.png similarity index 100% rename from docs/OV_Runtime_UG/img/ngraph_insert_node.png rename to docs/Extensibility_UG/img/ngraph_insert_node.png diff --git a/docs/OV_Runtime_UG/img/ngraph_replace_node.png b/docs/Extensibility_UG/img/ngraph_replace_node.png similarity index 100% rename from docs/OV_Runtime_UG/img/ngraph_replace_node.png rename to docs/Extensibility_UG/img/ngraph_replace_node.png diff --git a/docs/OV_Runtime_UG/img/register_new_node.png b/docs/Extensibility_UG/img/register_new_node.png similarity index 100% rename from docs/OV_Runtime_UG/img/register_new_node.png rename to docs/Extensibility_UG/img/register_new_node.png diff --git a/docs/OV_Runtime_UG/img/transformations_structure.png b/docs/Extensibility_UG/img/transformations_structure.png similarity index 100% rename from docs/OV_Runtime_UG/img/transformations_structure.png rename to docs/Extensibility_UG/img/transformations_structure.png diff --git a/docs/Extensibility_UG/matcher_pass.md b/docs/Extensibility_UG/matcher_pass.md new file mode 100644 index 00000000000..f85d0ecaefe --- /dev/null +++ 
b/docs/Extensibility_UG/matcher_pass.md @@ -0,0 +1,101 @@ +# OpenVINO Matcher Pass {#openvino_docs_Extensibility_UG_matcher_pass} + +`ov::pass::MatcherPass` is used for pattern-based transformations. + +Template for MatcherPass transformation class +@snippet src/transformations/template_pattern_transformation.hpp graph_rewrite:template_transformation_hpp + +@snippet src/transformations/template_pattern_transformation.cpp graph_rewrite:template_transformation_cpp + +To use `ov::pass::MatcherPass`, you need to complete these steps: +1. Create a pattern +2. Implement a callback +3. Register the pattern and Matcher +4. Execute MatcherPass + +So let's go through each of these steps. + +## Create a pattern + +Pattern is a single root `ov::Model`. But the only difference is that you do not need to create a model object, you just need to create and connect opset or special pattern operations. +Then you need to take the last created operation and put it as a root of the pattern. This root node will be used as a root node in pattern matching. +> **NOTE**: Any nodes in a pattern that have no consumers and are not registered as root will not be used in pattern matching. + +@snippet ov_model_snippets.cpp pattern:simple_example + +The `Parameter` operation in the example above has type and shape specified. These attributes are needed only to create Parameter operation class and will not be used in pattern matching. + +For more pattern examples, refer to the [pattern matching](#pattern_matching) section. + +## Implement callback + +Callback is an action applied to every pattern entrance. In general, callback is the lambda function that takes Matcher object with detected subgraph. + +@snippet ov_model_snippets.cpp pattern:callback_example + +The example above shows the callback structure and how Matcher can be used for accessing nodes detected by pattern. 
+Callback return value is `true` if root node was replaced and another pattern cannot be applied to the same root node; otherwise, it is `false`. +> **NOTE**: It is not recommended to manipulate with nodes that are under root node. This may affect GraphRewrite execution as it is expected that all nodes that come after root node in topological order are valid and can be used in pattern matching. + +MatcherPass also provides functionality that allows reporting of the newly created nodes that can be used in additional pattern matching. +If MatcherPass was registered in `ov::pass::Manager` or `ov::pass::GraphRewrite`, these registered nodes will be added for additional pattern matching. +That means that matcher passes registered in `ov::pass::GraphRewrite` will be applied to these nodes. + +The example below shows how single MatcherPass can fuse sequence of operations using the `register_new_node` method. + +@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:relu_fusion + +> **NOTE**: If you register multiple nodes, please add them in topological order. We do not topologically sort these nodes as it is a time-consuming operation. + +## Register pattern and Matcher + +The last step is to register Matcher and callback inside the MatcherPass pass. To do this, call the `register_matcher` method. +> **NOTE**: Only one matcher can be registered for a single MatcherPass class. + +```cpp +// Register matcher and callback +register_matcher(m, callback); +``` +## Execute MatcherPass + +MatcherPass has multiple ways to be executed: +* Run on a single node - it can be useful if you want to run MatcherPass inside another transformation. +@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:run_on_node +* Run on `ov::Model` using GraphRewrite - this approach gives ability to run MatcherPass on whole `ov::Model`. 
Moreover, multiple MatcherPass transformations can be registered in a single GraphRewrite to be executed in a single graph traversal. +@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:graph_rewrite +* Run on `ov::Model` using `ov::pass::Manager` - this approach helps you to register MatcherPass for execution on `ov::Model` like other transformation types. +@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:manager + +## Pattern Matching + +Sometimes patterns cannot be expressed via regular operations or it is too complicated. +For example, if you want to detect a **Convolution->Add** sub-graph without specifying a particular input type for the Convolution operation, or you want to create a pattern where some of the operations can have different types. +And for these cases OpenVINO™ provides additional helpers to construct patterns for GraphRewrite transformations. + +There are two main helpers: +1. `ov::pass::pattern::any_input` - helps to express inputs if their types are undefined. +2. `ov::pass::pattern::wrap_type` - helps to express nodes of pattern without specifying node attributes. + +Let's go through the example to have a better understanding of how it works: + +> **NOTE**: Node attributes do not participate in pattern matching and are needed only for operations creation. Only operation types participate in pattern matching. + +The example below shows basic usage of `ov::pass::pattern::any_input`. +Here we construct a Multiply pattern with an arbitrary first input and a Constant as a second input. +Also, as Multiply is a commutative operation, it does not matter in which order we set inputs (any_input/Constant or Constant/any_input) because both cases will be matched. + +@snippet ov_model_snippets.cpp pattern:label_example + +This example shows how we can construct a pattern when an operation has an arbitrary number of inputs.
+ +@snippet ov_model_snippets.cpp pattern:concat_example + +This example shows how to use predicate to construct a pattern. Also it shows how to match pattern manually on given node. + +@snippet ov_model_snippets.cpp pattern:predicate_example + +> **NOTE**: Be careful with manual matching because Matcher object holds matched nodes. To clear a match, use the m->clear_state() method. + +## See Also + +* [OpenVINO™ Transformations](./ov_transformations.md) diff --git a/docs/Extensibility_UG/model_pass.md b/docs/Extensibility_UG/model_pass.md new file mode 100644 index 00000000000..d2add64d3a5 --- /dev/null +++ b/docs/Extensibility_UG/model_pass.md @@ -0,0 +1,17 @@ +# OpenVINO Model Pass {#openvino_docs_Extensibility_UG_model_pass} + +`ov::pass::ModelPass` is used for transformations that take entire `ov::Model` as an input and process it. + +Template for ModelPass transformation class + +@snippet src/transformations/template_model_transformation.hpp model_pass:template_transformation_hpp + +@snippet src/transformations/template_model_transformation.cpp model_pass:template_transformation_cpp + +Using `ov::pass::ModelPass`, you need to override the `run_on_model` method where you will write the transformation code. +Return value is `true` if the original model has changed during transformation (new operation was added, or operations replacement was made, or node attributes were changed); otherwise, it is `false`. +Also `ov::pass::ModelPass` based transformations can be executed via `ov::pass::Manager`. + +## See Also + +* [OpenVINO™ Transformations](./ov_transformations.md) diff --git a/docs/Extensibility_UG/ov_transformations.md b/docs/Extensibility_UG/ov_transformations.md new file mode 100644 index 00000000000..fe44f4ab5cf --- /dev/null +++ b/docs/Extensibility_UG/ov_transformations.md @@ -0,0 +1,173 @@ +# Overview of Transformations API {#openvino_docs_transformations} + +@sphinxdirective + +.. 
toctree:: + :maxdepth: 1 + :hidden: + + openvino_docs_Extensibility_UG_model_pass + openvino_docs_Extensibility_UG_matcher_pass + openvino_docs_Extensibility_UG_graph_rewrite_pass + +@endsphinxdirective + +OpenVINO Transformation mechanism allows to develop transformation passes to modify `ov::Model`. You can use this mechanism to apply additional optimizations to the original Model or transform unsupported subgraphs and operations to new operations which are supported by the plugin. +This guide contains all necessary information that you need to start implementing OpenVINO™ transformations. + +## Working with Model + +Before the moving to transformation part it is needed to say several words about functions which allow to modify `ov::Model`. +This chapter extends the [model representation guide](../OV_Runtime_UG/model_representation.md) and shows an API that allows us to manipulate with `ov::Model`. + +### Working with node input and output ports + +First of all let's talk about `ov::Node` input/output ports. Each OpenVINO™ operation has input and output ports except cases when operation has `Parameter` or `Constant` type. + +Every port belongs to its node, so using a port we can access parent node, get shape and type for particular input/output, get all consumers in case of output port, and get producer node in case of input port. +With output port we can set inputs for newly created operations. + +Lets look at the code example. + +@snippet ov_model_snippets.cpp ov:ports_example + +### Node replacement + +OpenVINO™ provides two ways for node replacement: via OpenVINO™ helper function and directly via port methods. We are going to review both of them. + +Let's start with OpenVINO™ helper functions. The most popular function is `ov::replace_node(old_node, new_node)`. + +We will review real replacement case where Negative operation is replaced with Multiply. 
+ +![ngraph_replace_node] + +@snippet ov_model_snippets.cpp ov:replace_node + +`ov::replace_node` has a constraint that the number of output ports for both operations must be the same; otherwise, it raises an exception. + + +The alternative way to do the same replacement is the following: + +@snippet ov_model_snippets.cpp ov:manual_replace + +Another transformation example is insertion. + +![ngraph_insert_node] + +@snippet ov_model_snippets.cpp ov:insert_node + +The alternative way to the insert operation is to make a node copy and use `ov::replace_node()`: + +@snippet ov_model_snippets.cpp ov:insert_node_with_copy + +### Node elimination + +Another type of node replacement is its elimination. + +To eliminate an operation, OpenVINO™ has a special method that considers all limitations related to OpenVINO™ Runtime. + +@snippet ov_model_snippets.cpp ov:eliminate_node + +In case of a successful replacement, `ov::replace_output_update_name()` automatically preserves the friendly name and runtime info. + +## Transformations types + +OpenVINO™ Runtime has three main transformation types: + +* [Model pass](./model_pass.md) - straightforward way to work with `ov::Model` directly +* [Matcher pass](./matcher_pass.md) - pattern-based transformation approach +* [Graph rewrite pass](./graph_rewrite_pass.md) - container for matcher passes needed for efficient execution + +![transformations_structure] + +## Transformation conditional compilation + +Transformation library has two internal macros to support conditional compilation feature. + +* `MATCHER_SCOPE(region)` - allows to disable the MatcherPass if matcher isn't used. The region name should be unique. This macro creates a local variable `matcher_name` which you should use as a matcher name. +* `RUN_ON_MODEL_SCOPE(region)` - allows to disable run_on_model pass if it isn't used. The region name should be unique. + +## Transformation writing essentials + +When developing a transformation, you need to follow these transformation rules: + +### 1.
Friendly Names + +Each `ov::Node` has a unique name and a friendly name. In transformations we care only about the friendly name because it represents the name from the model. +To avoid losing the friendly name when replacing a node with another node or subgraph, set the original friendly name to the latest node in the replacing subgraph. See the example below. + +@snippet ov_model_snippets.cpp ov:replace_friendly_name + +In more advanced cases, when a replaced operation has several outputs and we add additional consumers to its outputs, we make a decision how to set the friendly name by arrangement. + +### 2. Runtime Info + +Runtime info is a map `std::map` located inside the `ov::Node` class. It represents additional attributes in `ov::Node`. +These attributes can be set by users or by plugins, and when executing a transformation that changes `ov::Model` we need to preserve these attributes as they will not be automatically propagated. +In most cases, transformations have the following types: 1:1 (replace node with another node), 1:N (replace node with a sub-graph), N:1 (fuse sub-graph into a single node), N:M (any other transformation). +Currently, there is no mechanism that automatically detects transformation types, so we need to propagate this runtime information manually. See the examples below. + +@snippet ov_model_snippets.cpp ov:copy_runtime_info + +When a transformation has multiple fusions or decompositions, `ov::copy_runtime_info` must be called multiple times for each case. + +**Note**: copy_runtime_info removes rt_info from destination nodes. If you want to keep it, you need to specify them in source nodes like this: copy_runtime_info({a, b, c}, {a, b}) + +### 3. Constant Folding + +If your transformation inserts constant sub-graphs that need to be folded, do not forget to use `ov::pass::ConstantFolding()` after your transformation or call constant folding directly for the operation. +The example below shows how a constant subgraph can be constructed.
+ +@snippet ov_model_snippets.cpp ov:constant_subgraph + +Manual constant folding is more preferable than `ov::pass::ConstantFolding()` because it is much faster. + +Below you can find an example of manual constant folding: + +@snippet src/transformations/template_pattern_transformation.cpp manual_constant_folding + +## Common mistakes in transformations + +In transformation development process: + +* Do not use deprecated OpenVINO™ API. Deprecated methods has the `OPENVINO_DEPRECATED` macros in its definition. +* Do not pass `shared_ptr` as an input for other node if type of node is unknown or it has multiple outputs. Use explicit output port. +* If you replace node with another node that produces different shape, remember that new shape will not be propagated until the first `validate_nodes_and_infer_types` call for `ov::Model`. If you are using `ov::pass::Manager`, it will automatically call this method after each transformation execution. +* Do not forget to call the `ov::pass::ConstantFolding` pass if your transformation creates constant subgraphs. +* Use latest OpSet if you are not developing downgrade transformation pass. +* When developing a callback for `ov::pass::MatcherPass`, do not change nodes that come after the root node in topological order. + +## Using pass manager + +`ov::pass::Manager` is a container class that can store the list of transformations and execute them. The main idea of this class is to have high-level representation for grouped list of transformations. +It can register and apply any [transformation pass](#transformations_types) on model. +In addition, `ov::pass::Manager` has extended debug capabilities (find more information in the [how to debug transformations](#how_to_debug_transformations) section). 
+ +The example below shows basic usage of `ov::pass::Manager` + +@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:manager3 + +Another example shows how multiple matcher passes can be united into single GraphRewrite. + +@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:manager2 + +## How to debug transformations + +If you are using `ngraph::pass::Manager` to run sequence of transformations, you can get additional debug capabilities by using the following environment variables: + +``` +OV_PROFILE_PASS_ENABLE=1 - enables performance measurement for each transformation and prints execution status +OV_ENABLE_VISUALIZE_TRACING=1 - enables visualization after each transformation. By default, it saves dot and svg files. +``` + +> **Note**: Make sure that you have dot installed on your machine; otherwise, it will silently save only dot file without svg file. + +## See Also + +* [OpenVINO™ Model Representation](../OV_Runtime_UG/model_representation.md) +* [OpenVINO™ Extensions](./Intro.md) + +[ngraph_replace_node]: ./img/ngraph_replace_node.png +[ngraph_insert_node]: ./img/ngraph_insert_node.png +[transformations_structure]: ./img/transformations_structure.png +[register_new_node]: ./img/register_new_node.png diff --git a/docs/HOWTO/Custom_Layers_Guide.md b/docs/HOWTO/Custom_Layers_Guide.md deleted file mode 100644 index b3dd18f3b14..00000000000 --- a/docs/HOWTO/Custom_Layers_Guide.md +++ /dev/null @@ -1,349 +0,0 @@ -# Custom Operations Guide {#openvino_docs_HOWTO_Custom_Layers_Guide} - -The Intel® Distribution of OpenVINO™ toolkit supports neural network models trained with multiple frameworks including -TensorFlow*, Caffe*, MXNet*, Kaldi* and ONNX* file format. The list of supported operations (layers) is different for -each of the supported frameworks. To see the operations supported by your framework, refer to -[Supported Framework Layers](../MO_DG/prepare_model/Supported_Frameworks_Layers.md). 
- -Custom operations, that is those not included in the list, are not recognized by Model Optimizer out-of-the-box. Therefore, creating Intermediate Representation (IR) for a model using them requires additional steps. This guide illustrates the workflow for running inference on topologies featuring custom operations, allowing you to plug in your own implementation for existing or completely new operations. - -> **NOTE**: *Layer* is a legacy term for *operation* which came from Caffe\* framework. Currently it is not used. -> Refer to the [Deep Learning Network Intermediate Representation and Operation Sets in OpenVINO™](../MO_DG/IR_and_opsets.md) -> for more information on the topic. - -## Terms Used in This Guide - -- *Intermediate Representation (IR)* — OpenVINO's Neural Network format used by Inference Engine. It abstracts different frameworks and describs model topology, operations parameters, and weights. - -- *Operation* — an abstract concept of a math function selected for a specific purpose. Operations supported by - OpenVINO™ are listed in the supported operation set provided in the [Available Operations Sets](../ops/opset.md). - Examples of the operations are: [ReLU](../ops/activation/ReLU_1.md), [Convolution](../ops/convolution/Convolution_1.md), - [Add](../ops/arithmetic/Add_1.md), etc. - -- *Kernel* — The implementation of an operation function in the OpenVINO™ plugin, in this case, the math programmed (in - C++ and OpenCL) to perform the operation for a target hardware (CPU or GPU). - -- *Inference Engine Extension* — Device-specific module implementing custom operations (a set of kernels). - -## Custom Operation Support Overview - -There are three steps to support inference of a model with custom operation(s): -1. Add support for a custom operation in the [Model Optimizer](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) so -the Model Optimizer can generate the IR with the operation. -2. 
Create an operation set and implement a custom nGraph operation in it as described in the -[Custom nGraph Operation](../OV_Runtime_UG/Extensibility_DG/AddingNGraphOps.md). -3. Implement a customer operation in one of the [OpenVINO™ Runtime](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) -plugins to support inference of this operation using a particular target hardware (CPU, GPU or VPU). - -To see the operations that are supported by each device plugin for the Inference Engine, refer to the -[Supported Devices](../OV_Runtime_UG/supported_plugins/Supported_Devices.md). - -> **NOTE**: If a device doesn't support a particular operation, an alternative to creating a new operation is to target -> an additional device using the HETERO plugin. The [Heterogeneous Plugin](../OV_Runtime_UG/supported_plugins/HETERO.md) may be -> used to run an inference model on multiple devices allowing the unsupported operations on one device to "fallback" to -> run on another device (e.g., CPU) that does support those operations. - -### Custom Operation Support for the Model Optimizer - -Model Optimizer model conversion pipeline is described in detail in "Model Conversion Pipeline" section of [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md). It is best to read that article first for a better understanding of the following material. - -Model Optimizer provides an extensions mechanism to support new operations and implement custom model transformations to generate optimized IR. This mechanism is described in the "Model Optimizer Extensions" section of -[Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md). - -Two types of Model Optimizer extensions should be implemented to support custom operations, at a minimum: -1. Operation class for a new operation. 
This class stores information about the operation, its attributes, shape inference function, attributes to be saved to an IR and some others internally used attributes. Refer to the "Model Optimizer Operation" section of [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) for detailed instructions on how to implement it. -2. Operation attributes extractor. The extractor is responsible for parsing framework-specific representation of the -operation and uses corresponding operation class to update graph node attributes with necessary attributes of the -operation. Refer to the "Operation Extractor" section of -[Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) for detailed instructions on how to implement it. - -> **NOTE**: In some cases you may need to implement some transformation to support the operation. This topic is covered in the "Graph Transformation Extensions" section of [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md). - -## Custom Operations Extensions for the Inference Engine - -Inference Engine provides an extension mechanism to support new operations. This mechanism is described in [Inference Engine Extensibility Mechanism](../OV_Runtime_UG/Extensibility_DG/Intro.md). - -Each device plugin includes a library of optimized implementations to execute known operations which must be extended to execute a custom operation. The custom operation extension is implemented according to the target device: - -- Custom Operation CPU Extension - - A compiled shared library (`.so` or `.dll`) needed by the CPU Plugin for executing the custom operation - on a CPU. Refer to the [How to Implement Custom CPU Operations](../OV_Runtime_UG/Extensibility_DG/CPU_Kernel.md) for more - details. 
-- Custom Operation GPU Extension - - OpenCL source code (.cl) for the custom operation kernel that will be compiled to execute on the GPU along with an operation description file (.xml) needed by the GPU Plugin for the custom operation kernel. Refer to the [How to Implement Custom GPU Operations](../OV_Runtime_UG/Extensibility_DG/GPU_Kernel.md) for more details. -- Custom Operation VPU Extension - - OpenCL source code (.cl) for the custom operation kernel that will be compiled to execute on the VPU along with an operation description file (.xml) needed by the VPU Plugin for the custom operation kernel. Refer to [How to Implement Custom Operations for VPU](../OV_Runtime_UG/Extensibility_DG/VPU_Kernel.md) for more details. - -Also, it is necessary to implement nGraph custom operation according to [Custom nGraph Operation](../OV_Runtime_UG/Extensibility_DG/AddingNGraphOps.md) so the Inference Engine can read an IR with this -operation and correctly infer output tensor shape and type. - -## Enabling Magnetic Resonance Image Reconstruction Model -This chapter provides step-by-step instructions on how to enable the magnetic resonance image reconstruction model implemented in the [repository](https://github.com/rmsouza01/Hybrid-CS-Model-MRI/) using a custom operation on CPU. The example is prepared for a model generated from the repository with hash `2ede2f96161ce70dcdc922371fe6b6b254aafcc8`. - -### Download and Convert the Model to a Frozen TensorFlow\* Model Format -The original pre-trained model is provided in the hdf5 format which is not supported by OpenVINO directly and needs to be converted to TensorFlow\* frozen model format first. - -1. Download repository `https://github.com/rmsouza01/Hybrid-CS-Model-MRI`:
-```bash - git clone https://github.com/rmsouza01/Hybrid-CS-Model-MRI - git checkout 2ede2f96161ce70dcdc922371fe6b6b254aafcc8 -``` - -2. Convert pre-trained `.hdf5` to a frozen `.pb` graph using the following script (tested with TensorFlow==1.15.0 and -Keras==2.2.4) which should be executed from the root of the cloned repository:
-```py - import keras as K - import numpy as np - import Modules.frequency_spatial_network as fsnet - import tensorflow as tf - - under_rate = '20' - - stats = np.load("Data/stats_fs_unet_norm_" + under_rate + ".npy") - var_sampling_mask = np.load("Data/sampling_mask_" + under_rate + "perc.npy") - - model = fsnet.wnet(stats[0], stats[1], stats[2], stats[3], kshape = (5,5), kshape2=(3,3)) - model_name = "Models/wnet_" + under_rate + ".hdf5" - model.load_weights(model_name) - - inp = np.random.standard_normal([1, 256, 256, 2]).astype(np.float32) - np.save('inp', inp) - - sess = K.backend.get_session() - sess.as_default() - graph_def = sess.graph.as_graph_def() - graph_def = tf.graph_util.convert_variables_to_constants(sess, graph_def, ['conv2d_44/BiasAdd']) - with tf.gfile.FastGFile('wnet_20.pb', 'wb') as f: - f.write(graph_def.SerializeToString()) -``` - -As a result the TensorFlow\* frozen model file "wnet_20.pb" is generated. - -### Convert the Frozen TensorFlow\* Model to Intermediate Representation - -Firstly, open the model in TensorBoard or other TensorFlow* model visualization tool. The model supports dynamic -batch dimension because the value for the batch dimension is not hardcoded in the model. Model Optimizer need to set all -dynamic dimensions to some specific value to create the IR, therefore specify the command line parameter `-b 1` to set -the batch dimension equal to 1. The actual batch size dimension can be changed at runtime using the Inference Engine API -described in the [Using Shape Inference](../OV_Runtime_UG/ShapeInference.md). Also refer to the General Conversion Parameters section in [Converting a Model to Intermediate Representation (IR)](../MO_DG/prepare_model/convert_model/Converting_Model.md) and [Convert Your TensorFlow* Model](../MO_DG/prepare_model/convert_model/Convert_Model_From_TensorFlow.md) -for more details and command line parameters used for the model conversion. 
- -```sh -mo --input_model /wnet_20.pb -b 1 -``` - -> **NOTE**: This conversion guide is applicable for the 2021.3 release of OpenVINO and that starting from 2021.4 -> the OpenVINO supports this model out of the box. - -Model Optimizer produces the following error: -```bash -[ ERROR ] List of operations that cannot be converted to Inference Engine IR: -[ ERROR ] Complex (1) -[ ERROR ] lambda_2/Complex -[ ERROR ] IFFT2D (1) -[ ERROR ] lambda_2/IFFT2D -[ ERROR ] ComplexAbs (1) -[ ERROR ] lambda_2/Abs -[ ERROR ] Part of the nodes was not converted to IR. Stopped. -``` - -The error means that the Model Optimizer doesn't know how to handle 3 types of TensorFlow\* operations: "Complex", -"IFFT2D" and "ComplexAbs". In order to see more details about the conversion process run the model conversion with -additional parameter `--log_level DEBUG`. It is worth to mention the following lines from the detailed output: - -```bash -[ INFO ] Called "tf_native_tf_node_infer" for node "lambda_2/Complex" -[ ] [ DEBUG ] [ tf:228 ] Added placeholder with name 'lambda_2/lambda_3/strided_slice_port_0_ie_placeholder' -[ ] [ DEBUG ] [ tf:228 ] Added placeholder with name 'lambda_2/lambda_4/strided_slice_port_0_ie_placeholder' -[ ] [ DEBUG ] [ tf:241 ] update_input_in_pbs: replace input 'lambda_2/lambda_3/strided_slice' with input 'lambda_2/lambda_3/strided_slice_port_0_ie_placeholder' -[ ] [ DEBUG ] [ tf:249 ] Replacing input '0' of the node 'lambda_2/Complex' with placeholder 'lambda_2/lambda_3/strided_slice_port_0_ie_placeholder' -[ ] [ DEBUG ] [ tf:241 ] update_input_in_pbs: replace input 'lambda_2/lambda_4/strided_slice' with input 'lambda_2/lambda_4/strided_slice_port_0_ie_placeholder' -[ ] [ DEBUG ] [ tf:249 ] Replacing input '1' of the node 'lambda_2/Complex' with placeholder 'lambda_2/lambda_4/strided_slice_port_0_ie_placeholder' -[ ] [ DEBUG ] [ tf:148 ] Inferred shape of the output tensor with index '0' of the node 'lambda_2/Complex': '[ 1 256 256]' -[ ] [ DEBUG ] [ infer:145 ] 
Outputs: -[ ] [ DEBUG ] [ infer:32 ] output[0]: shape = [ 1 256 256], value = -[ ] [ DEBUG ] [ infer:129 ] -------------------- -[ ] [ DEBUG ] [ infer:130 ] Partial infer for lambda_2/IFFT2D -[ ] [ DEBUG ] [ infer:131 ] Op: IFFT2D -[ ] [ DEBUG ] [ infer:132 ] Inputs: -[ ] [ DEBUG ] [ infer:32 ] input[0]: shape = [ 1 256 256], value = -``` - -This is a part of the log of the partial inference phase of the model conversion. See the "Partial Inference" section on -the [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) for -more information about this phase. Model Optimizer inferred output shape for the unknown operation of type "Complex" -using a "fallback" to TensorFlow\*. However, it is not enough to generate the IR because Model Optimizer doesn't know -which attributes of the operation should be saved to IR. So it is necessary to implement Model Optimizer extensions to -support these operations. - -Before going into the extension development it is necessary to understand what these unsupported operations do according -to the TensorFlow\* framework specification. - -* "Complex" - returns a tensor of complex type constructed from two real input tensors specifying real and imaginary -part of a complex number. -* "IFFT2D" - returns a tensor with inverse 2-dimensional discrete Fourier transform over the inner-most 2 dimensions of - an input. -* "ComplexAbs" - returns a tensor with absolute values of input tensor with complex numbers. - -The part of the model with all three unsupported operations is depicted below: - -![Unsupported sub-graph](img/unsupported_subgraph.png) - -This model uses complex numbers during the inference but Inference Engine does not support tensors of this data type. So -it is necessary to find a way how to avoid using tensors of such a type in the model. 
Fortunately, the complex tensor -appear as a result of "Complex" operation, is used as input in the "IFFT2D" operation then is passed to "ComplexAbs" -which produces real value tensor as output. So there are just 3 operations consuming/producing complex tensors in the -model. - -Let's design an OpenVINO operation "FFT" which get a single real number tensor describing the complex number and -produces a single real number tensor describing output complex tensor. This way the fact that the model uses complex -numbers is hidden inside the "FFT" operation implementation. The operation gets a tensor of shape `[N, H, W, 2]` and -produces the output tensor with the same shape, where the innermost dimension contains pairs of real numbers describing -the complex number (its real and imaginary part). As we will see further this operation will allow us to support the -model. The implementation of the Model Optimizer operation should be saved to `mo_extensions/ops/FFT.py` file: - -@snippet FFT.py fft:operation - -The attribute `inverse` is a flag specifying type of the FFT to apply: forward or inverse. - -See the "Model Optimizer Operation" section of [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) for detailed instructions on how to implement the operation. - -Now it is necessary to implement extractor for the "IFFT2D" operation according to the -"Operation Extractor" section of [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md). The -following snippet provides two extractors: one for "IFFT2D", another one for "FFT2D", however only on of them is used in this example. The implementation should be saved to the file `mo_extensions/front/tf/FFT_ext.py`. 
- -@snippet FFT_ext.py fft_ext:extractor - -> **NOTE**: The graph is in inconsistent state after extracting node attributes because according to original operation -> "IFFT2D" semantic it should have an input consuming a tensor of complex numbers, but the extractor instantiated an -> operation "FFT" which expects a real tensor with specific layout. But the inconsistency will be resolved during -> applying front phase transformations discussed below. - -The output shape of the operation "AddV2" from the picture above is `[N, H, W, 2]`. Where the innermost dimension -contains pairs of real numbers describing the complex number (its real and imaginary part). The following "StridedSlice" -operations split the input tensor into 2 parts to get a tensor of real and a tensor of imaginary parts which are then -consumed with the "Complex" operation to produce a tensor of complex numbers. These "StridedSlice" and "Complex" -operations can be removed so the "FFT" operation will get a real value tensor encoding complex numbers. To achieve this -we implement the front phase transformation which searches for a pattern of two "StridedSlice" operations with specific -attributes producing data to "Complex" operation and removes it from the graph. Refer to the -"Pattern-Defined Front Phase Transformations" section of [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) for more -information on how this type of transformation works. The code snippet should be saved to the file -`mo_extensions/front/tf/Complex.py`. - -@snippet Complex.py complex:transformation - -> **NOTE**: The graph is in inconsistent state because the "ComplexAbs" operation consumes complex value tensor but -> "FFT" produces real value tensor. 
- -Now lets implement a transformation which replace a "ComplexAbs" operation with a sub-graph of primitive operations -which calculate the result using the following formulae: \f$module(z) = \sqrt{real(z) \cdot real(z) + imag(z) \cdot imag(z)}\f$. -Original "IFFT2D" operation produces tensor of complex values, but the "FFT" operation produces a real value tensor with -the same format and shape as the input for the operation. So the input shape for the "ComplexAbs" will be `[N, H, W, 2]` -with the innermost dimension containing tuple with real and imaginary part of a complex number. In order to calculate -absolute values for the complex tensor we do the following: -1. Raise all elements in the power of 2. -2. Calculate a reduced sum over the innermost dimension. -3. Calculate a square root. - -The implementation should be saved to the file `mo_extensions/front/tf/ComplexAbs.py` and provided below: - -@snippet ComplexAbs.py complex_abs:transformation - -Now it is possible to convert the model using the following command line: -```sh -mo --input_model /wnet_20.pb -b 1 --extensions mo_extensions/ -``` - -The sub-graph corresponding to the originally non-supported one is depicted in the image below: - -![Converted sub-graph](img/converted_subgraph.png) - -> **NOTE**: Model Optimizer performed conversion of the model from NHWC to NCHW layout that is why the dimension with -> the value 2 moved to another position. - -### Inference Engine Extension Implementation -Now it is necessary to implement the extension for the CPU plugin with operation "FFT" introduced previously. The code -below is based on the template extension described in [Inference Engine Extensibility Mechanism](../OV_Runtime_UG/Extensibility_DG/Intro.md). - -#### CMake Build File -The first step is to create a CMake configuration file which builds the extension. 
The content of the "CMakeLists.txt" -file is the following: - -@snippet template_extension/old/CMakeLists.txt cmake:extension - -The CPU FFT kernel implementation uses OpenCV to perform the FFT that is why the extension library is linked with -`opencv_core` which comes with the OpenVINO. - -#### Custom nGraph Operation "FFT" Implementation -The next step is to create the nGraph operation FFT. The header file "fft_op.hpp" has the following content: - -@snippet template_extension/old/fft_op.hpp fft_op:header - -The operation has just one boolean attribute `inverse`. Implementation of the necessary nGraph operation functions are -in the `fft_op.cpp` file with the following content: - -@snippet template_extension/old/fft_op.cpp fft_op:implementation - -Refer to the [Custom nGraph Operation](../OV_Runtime_UG/Extensibility_DG/AddingNGraphOps.md) for more details. - -#### CPU FFT Kernel Implementation -The operation implementation for CPU plugin uses OpenCV to perform the FFT. The header file "fft_kernel.hpp" has the -following content: - -@snippet template_extension/old/fft_kernel.hpp fft_kernel:header - -The "fft_kernel.cpp" with the implementation of the CPU has the following content: - -@snippet template_extension/old/fft_kernel.cpp fft_kernel:implementation - -Refer to the [How to Implement Custom CPU Operations](../OV_Runtime_UG/Extensibility_DG/CPU_Kernel.md) for more details. - -#### Extension Library Implementation -The last step is to create an extension library "extension.cpp" and "extension.hpp" which will include the FFT -operation for the CPU plugin. The code of the library is described in the [Extension Library](../OV_Runtime_UG/Extensibility_DG/Extension.md). - -### Building and Running the Custom Extension -To build the extension, run the following:
-```bash -mkdir build && cd build -source /opt/intel/openvino_2022/setupvars.sh -cmake .. -DCMAKE_BUILD_TYPE=Release -make --jobs=$(nproc) -``` - -The result of this command is a compiled shared library (`.so` or `.dll`). It should be loaded in the -application using `Core` class instance method `AddExtension` like this -`core.AddExtension(std::make_shared(compiled_library_file_name), "CPU");`. - -To test that the extension is implemented correctly we can run the "mri_reconstruction_demo" with the following content: - -@snippet mri_reconstruction_demo.py mri_demo:demo - -The script can be executed using the following command line: -```bash -python3 mri_reconstruction_demo.py \ - -m /wnet_20.xml \ - -i .npy \ - -p /Data/sampling_mask_20perc.npy \ - -l /libtemplate_extension.so \ - -d CPU -``` - -## Additional Resources - -- Intel® Distribution of OpenVINO™ toolkit home page: [https://software.intel.com/en-us/openvino-toolkit](https://software.intel.com/en-us/openvino-toolkit) -- OpenVINO™ toolkit online documentation: [https://docs.openvino.ai](https://docs.openvino.ai) -- [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) -- [Model Optimizer Extensibility](../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) -- [Inference Engine Extensibility Mechanism](../OV_Runtime_UG/Extensibility_DG/Intro.md) -- [OpenVINO™ Toolkit Samples Overview](../OV_Runtime_UG/Samples_Overview.md) -- [Overview of OpenVINO™ Toolkit Pre-Trained Models](@ref omz_models_group_intel) -- For IoT Libraries and Code Samples see the [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit). 
- -## Converting Models: - -- [Convert Your Caffe* Model](../MO_DG/prepare_model/convert_model/Convert_Model_From_Caffe.md) -- [Convert Your TensorFlow* Model](../MO_DG/prepare_model/convert_model/Convert_Model_From_TensorFlow.md) -- [Convert Your MXNet* Model](../MO_DG/prepare_model/convert_model/Convert_Model_From_MxNet.md) -- [Convert Your Kaldi* Model](../MO_DG/prepare_model/convert_model/Convert_Model_From_Kaldi.md) -- [Convert Your ONNX* Model](../MO_DG/prepare_model/convert_model/Convert_Model_From_ONNX.md) diff --git a/docs/IE_PLUGIN_DG/AsyncInferRequest.md b/docs/IE_PLUGIN_DG/AsyncInferRequest.md index d9aa2a5d3c0..59c5beadd55 100644 --- a/docs/IE_PLUGIN_DG/AsyncInferRequest.md +++ b/docs/IE_PLUGIN_DG/AsyncInferRequest.md @@ -1,7 +1,7 @@ # Asynchronous Inference Request {#openvino_docs_ie_plugin_dg_async_infer_request} Asynchronous Inference Request runs an inference pipeline asynchronously in one or several task executors depending on a device pipeline structure. -Inference Engine Plugin API provides the base InferenceEngine::AsyncInferRequestThreadSafeDefault class: +OpenVINO Runtime Plugin API provides the base InferenceEngine::AsyncInferRequestThreadSafeDefault class: - The class has the `_pipeline` field of `std::vector >`, which contains pairs of an executor and executed task. - All executors are passed as arguments to a class constructor and they are in the running state and ready to run tasks. 
@@ -10,7 +10,7 @@ Inference Engine Plugin API provides the base InferenceEngine::AsyncInferRequest `AsyncInferRequest` Class ------------------------ -Inference Engine Plugin API provides the base InferenceEngine::AsyncInferRequestThreadSafeDefault class for a custom asynchronous inference request implementation: +OpenVINO Runtime Plugin API provides the base InferenceEngine::AsyncInferRequestThreadSafeDefault class for a custom asynchronous inference request implementation: @snippet src/template_async_infer_request.hpp async_infer_request:header diff --git a/docs/IE_PLUGIN_DG/Doxyfile b/docs/IE_PLUGIN_DG/Doxyfile index 7d7735b1fbc..84416e0483e 100644 --- a/docs/IE_PLUGIN_DG/Doxyfile +++ b/docs/IE_PLUGIN_DG/Doxyfile @@ -675,7 +675,7 @@ SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via -# popen()) the command command input-file, where command is the value of the +# popen()) the command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. diff --git a/docs/IE_PLUGIN_DG/ExecutableNetwork.md b/docs/IE_PLUGIN_DG/ExecutableNetwork.md index 5f703bcd880..0a7f7c7182a 100644 --- a/docs/IE_PLUGIN_DG/ExecutableNetwork.md +++ b/docs/IE_PLUGIN_DG/ExecutableNetwork.md @@ -38,7 +38,7 @@ The implementation `CompileNetwork` is fully device-specific. The function accepts a const shared pointer to `ngraph::Function` object and performs the following steps: 1. Applies ngraph passes using `TransformNetwork` function, which defines plugin-specific conversion pipeline. To support low precision inference, the pipeline can include Low Precision Transformations. 
These transformations are usually hardware specific. You can find how to use and configure Low Precisions Transformations in [Low Precision Transformations](@ref openvino_docs_IE_DG_lpt) guide. -2. Maps the transformed graph to a backend specific graph representation (for example, to MKLDNN graph for Intel CPU). +2. Maps the transformed graph to a backend specific graph representation (for example, to CPU plugin internal graph representation). 3. Allocates and fills memory for graph weights, backend specific memory handles and so on. @snippet src/template_executable_network.cpp executable_network:map_graph diff --git a/docs/IE_PLUGIN_DG/InferRequest.md b/docs/IE_PLUGIN_DG/InferRequest.md index 5850c3fc625..5d50c8e36fc 100644 --- a/docs/IE_PLUGIN_DG/InferRequest.md +++ b/docs/IE_PLUGIN_DG/InferRequest.md @@ -54,7 +54,7 @@ Decrements a number of created inference requests: #### 1. `inferPreprocess` -Below is the code of the the `inferPreprocess` method to demonstrate Inference Engine common preprocessing step handling: +Below is the code of the `inferPreprocess` method to demonstrate Inference Engine common preprocessing step handling: @snippet src/template_infer_request.cpp infer_request:infer_preprocess diff --git a/docs/IE_PLUGIN_DG/Intro.md b/docs/IE_PLUGIN_DG/Intro.md index 17228b12115..99cbffbec9c 100644 --- a/docs/IE_PLUGIN_DG/Intro.md +++ b/docs/IE_PLUGIN_DG/Intro.md @@ -56,7 +56,7 @@ Detailed guides * Plugin and its components [testing](@ref openvino_docs_ie_plugin_dg_plugin_testing) * [Quantized networks](@ref openvino_docs_ie_plugin_dg_quantized_networks) * [Low precision transformations](@ref openvino_docs_IE_DG_lpt) guide -* [Writing nGraph transformations](@ref ngraph_transformation) guide +* [Writing OpenVINO™ transformations](@ref openvino_docs_transformations) guide API References ----------------------- diff --git a/docs/IE_PLUGIN_DG/Plugin.md b/docs/IE_PLUGIN_DG/Plugin.md index f92a03021a8..a7dc7ecfd2b 100644 --- a/docs/IE_PLUGIN_DG/Plugin.md +++ 
b/docs/IE_PLUGIN_DG/Plugin.md @@ -2,7 +2,7 @@ Inference Engine Plugin usually represents a wrapper around a backend. Backends can be: - OpenCL-like backend (e.g. clDNN library) for GPU devices. -- MKLDNN backend for Intel CPU devices. +- oneDNN backend for Intel CPU devices. - NVIDIA cuDNN for NVIDIA GPUs. The responsibility of Inference Engine Plugin: @@ -30,7 +30,7 @@ Based on that, declaration of a plugin class can look as follows: The provided plugin class also has several fields: -* `_backend` - a backend engine that is used to perform actual computations for network inference. For `Template` plugin `ngraph::runtime::Backend` is used which performs computations using ngraph reference implementations. +* `_backend` - a backend engine that is used to perform actual computations for network inference. For `Template` plugin `ngraph::runtime::Backend` is used which performs computations using OpenVINO™ reference implementations. * `_waitExecutor` - a task executor that waits for a response from a device about device tasks completion. * `_cfg` of type `Configuration`: @@ -67,7 +67,7 @@ which holds a backend-dependent compiled graph in an internal representation: Before a creation of an `ExecutableNetwork` instance via a constructor, a plugin may check if a provided InferenceEngine::ICNNNetwork object is supported by a device. In the example above, the plugin checks precision information. -The very important part before creation of `ExecutableNetwork` instance is to call `TransformNetwork` method which applies ngraph transformation passes. +The very important part before creation of `ExecutableNetwork` instance is to call `TransformNetwork` method which applies OpenVINO™ transformation passes. Actual graph compilation is done in the `ExecutableNetwork` constructor. Refer to the [ExecutableNetwork Implementation Guide](@ref openvino_docs_ie_plugin_dg_executable_network) for details. 
@@ -77,27 +77,27 @@ Actual graph compilation is done in the `ExecutableNetwork` constructor. Refer t ### `TransformNetwork()` -The function accepts a const shared pointer to `ngraph::Function` object and performs the following steps: +The function accepts a const shared pointer to `ov::Model` object and performs the following steps: 1. Deep copies a const object to a local object, which can later be modified. -2. Applies common and plugin-specific transformations on a copied graph to make the graph more friendly to hardware operations. For details how to write custom plugin-specific transformation, please, refer to [Writing ngraph transformations](@ref ngraph_transformation) guide. See detailed topics about network representation: +2. Applies common and plugin-specific transformations on a copied graph to make the graph more friendly to hardware operations. For details how to write custom plugin-specific transformation, please, refer to [Writing OpenVINO™ transformations](@ref openvino_docs_transformations) guide. See detailed topics about network representation: * [Intermediate Representation and Operation Sets](../_docs_MO_DG_IR_and_opsets.html) * [Quantized networks](@ref openvino_docs_ie_plugin_dg_quantized_networks). @snippet template_plugin/src/template_plugin.cpp plugin:transform_network -> **NOTE**: After all these transformations, a `ngraph::Function` object contains operations which can be perfectly mapped to backend kernels. E.g. if backend has kernel computing `A + B` operations at once, the `TransformNetwork` function should contain a pass which fuses operations `A` and `B` into a single custom operation `A + B` which fits backend kernels set. +> **NOTE**: After all these transformations, a `ov::Model` object contains operations which can be perfectly mapped to backend kernels. E.g. 
if backend has kernel computing `A + B` operations at once, the `TransformNetwork` function should contain a pass which fuses operations `A` and `B` into a single custom operation `A + B` which fits backend kernels set. ### `QueryNetwork()` Use the method with the `HETERO` mode, which allows to distribute network execution between different -devices based on the `ngraph::Node::get_rt_info()` map, which can contain the `"affinity"` key. +devices based on the `ov::Node::get_rt_info()` map, which can contain the `"affinity"` key. The `QueryNetwork` method analyzes operations of provided `network` and returns a list of supported -operations via the InferenceEngine::QueryNetworkResult structure. The `QueryNetwork` firstly applies `TransformNetwork` passes to input `ngraph::Function` argument. After this, the transformed network in ideal case contains only operations are 1:1 mapped to kernels in computational backend. In this case, it's very easy to analyze which operations is supposed (`_backend` has a kernel for such operation or extensions for the operation is provided) and not supported (kernel is missed in `_backend`): +operations via the InferenceEngine::QueryNetworkResult structure. The `QueryNetwork` firstly applies `TransformNetwork` passes to input `ov::Model` argument. After this, the transformed network in ideal case contains only operations that are 1:1 mapped to kernels in computational backend. In this case, it's very easy to analyze which operations are supported (`_backend` has a kernel for such operation or extensions for the operation is provided) and not supported (kernel is missed in `_backend`): -1. Store original names of all operations in input `ngraph::Function` +1. Store original names of all operations in input `ov::Model` 2. Apply `TransformNetwork` passes. Note, the names of operations in a transformed network can be different and we need to restore the mapping in the steps below. -3. 
Construct `supported` and `unsupported` maps which contains names of original operations. Note, that since the inference is performed using ngraph reference backend, the decision whether the operation is supported or not depends on whether the latest OpenVINO opset contains such operation. +3. Construct `supported` and `unsupported` maps which contains names of original operations. Note, that since the inference is performed using OpenVINO™ reference backend, the decision whether the operation is supported or not depends on whether the latest OpenVINO opset contains such operation. 4. `QueryNetworkResult.supportedLayersMap` contains only operations which are fully supported by `_backend`. @snippet template_plugin/src/template_plugin.cpp plugin:query_network diff --git a/docs/IE_PLUGIN_DG/PluginTesting.md b/docs/IE_PLUGIN_DG/PluginTesting.md index a5d75cd21b8..f985ad57771 100644 --- a/docs/IE_PLUGIN_DG/PluginTesting.md +++ b/docs/IE_PLUGIN_DG/PluginTesting.md @@ -26,7 +26,7 @@ Engine concepts: plugin creation, multiple executable networks support, multiple @snippet single_layer_tests/convolution.cpp test_convolution:instantiate 3. **Sub-graph tests** (`subgraph_tests` sub-folder). This group of tests is designed to tests small patterns or combination of layers. E.g. when a particular topology is being enabled in a plugin e.g. TF ResNet-50, there is no need to add the whole topology to test tests. In opposite way, a particular repetitive subgraph or pattern can be extracted from `ResNet-50` and added to the tests. The instantiation of the sub-graph tests is done in the same way as for single layer tests. -> **Note**, such sub-graphs or patterns for sub-graph tests should be added to `IE::ngraphFunctions` library first (this library is a pre-defined set of small `ngraph::Function`) and re-used in sub-graph tests after. 
+> **Note**, such sub-graphs or patterns for sub-graph tests should be added to `IE::ngraphFunctions` library first (this library is a pre-defined set of small `ov::Model`) and re-used in sub-graph tests after. 4. **HETERO tests** (`subgraph_tests` sub-folder) contains tests for `HETERO` scenario (manual or automatic affinities settings, tests for `QueryNetwork`). @@ -41,18 +41,14 @@ To use these tests for your own plugin development, link the `IE::funcSharedTest To build test binaries together with other build artifacts, use the `make all` command. For details, see [Build Plugin Using CMake*](@ref openvino_docs_ie_plugin_dg_plugin_build). -### Tests for plugin-specific ngraph transformations - -Please, refer to [Transformation testing](@ref ngraph_transformation) guide. - ### How to Extend Inference Engine Plugin Tests Inference Engine Plugin tests are open for contribution. Add common test case definitions applicable for all plugins to the `IE::funcSharedTests` target within the DLDT repository. Then, any other plugin supporting corresponding functionality can instantiate the new test. -All Inference Engine per-layer tests check test layers functionality. They are developed using nGraph functions +All Inference Engine per-layer tests check test layers functionality. They are developed using ov::Model +as input graphs used by tests. In this case, to test a new layer with layer tests, extend -the `IE::ngraphFunctions` library, which is also included in the Inference Engine Developer package, with a new nGraph function +the `IE::ngraphFunctions` library, which is also included in the Inference Engine Developer package, with a new model +including the corresponding operation. > **NOTE**: When implementing a new subgraph test, add new single-layer tests for each operation of the subgraph if such test does not exist. 
diff --git a/docs/IE_PLUGIN_DG/QuantizedNetworks.md b/docs/IE_PLUGIN_DG/QuantizedNetworks.md index fb7880b66fc..0c8ad29c234 100644 --- a/docs/IE_PLUGIN_DG/QuantizedNetworks.md +++ b/docs/IE_PLUGIN_DG/QuantizedNetworks.md @@ -9,7 +9,7 @@ For more details about low-precision model representation please refer to this [ During the model load each plugin can interpret quantization rules expressed in *FakeQuantize* operations: - Independently based on the definition of *FakeQuantize* operation. - Using a special library of low-precision transformations (LPT) which applies common rules for generic operations, -such as Convolution, Fully-Connected, Eltwise, etc., and translates "fake-quantized" models into the models with low-precision operations. For more information about low-precision flow please refer to the following [document](@ref openvino_docs_IE_DG_Int8Inference). +such as Convolution, Fully-Connected, Eltwise, etc., and translates "fake-quantized" models into the models with low-precision operations. For more information about low-precision flow please refer to the following [document](../OV_Runtime_UG/Int8Inference.md). Here we provide only a high-level overview of the interpretation rules of FakeQuantize. At runtime each FakeQuantize can be split into two independent operations: **Quantize** and **Dequantize**. diff --git a/docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md b/docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md index 731eb073ea7..0fa581a39ab 100644 --- a/docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md +++ b/docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md @@ -1,4 +1,4 @@ -# Model Optimizer Developer Guide {#openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide} +# Model Optimizer User Guide {#openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide} @sphinxdirective @@ -7,678 +7,128 @@ .. 
toctree:: :maxdepth: 1 :hidden: - - openvino_docs_MO_DG_IR_and_opsets + openvino_docs_MO_DG_prepare_model_convert_model_Converting_Model + openvino_docs_MO_DG_prepare_model_convert_model_Cutting_Model openvino_docs_MO_DG_Additional_Optimization_Use_Cases - openvino_docs_MO_DG_prepare_model_customize_model_optimizer_Customize_Model_Optimizer + openvino_docs_MO_DG_FP16_Compression + openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_TensorFlow + openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_ONNX + openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_PyTorch + openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Paddle + openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_MxNet + openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Caffe + openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Kaldi + openvino_docs_MO_DG_prepare_model_convert_model_tutorials openvino_docs_MO_DG_prepare_model_Model_Optimizer_FAQ - openvino_docs_MO_DG_Known_Issues_Limitations - openvino_docs_MO_DG_Default_Model_Optimizer_Optimizations @endsphinxdirective -## Introduction +## Introduction -Model Optimizer is a cross-platform command-line tool that facilitates the transition between the training and deployment environment, performs static model analysis, and adjusts deep learning models for optimal execution on end-point target devices. +Model Optimizer is a cross-platform command-line tool that facilitates the transition between training and deployment environments, performs static model analysis, and adjusts deep learning models for optimal execution on end-point target devices. -Model Optimizer process assumes you have a network model trained using supported deep learning frameworks: Caffe*, TensorFlow*, Kaldi*, MXNet* or converted to the ONNX* format. 
Model Optimizer produces an Intermediate Representation (IR) of the network, which can be inferred with the [OpenVINO™ Runtime](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md). +Using Model Optimizer tool assumes you already have a deep learning model trained using one of the supported frameworks: TensorFlow, PyTorch, PaddlePaddle, MXNet, Caffe, Kaldi, or represented in ONNX* format. Model Optimizer produces an Intermediate Representation (IR) of the model, which can be inferred with [OpenVINO™ Runtime](../OV_Runtime_UG/openvino_intro.md). -> **NOTE**: Model Optimizer does not infer models. Model Optimizer is an offline tool that runs before the inference takes place. +> **NOTE**: Model Optimizer does not infer models. Model Optimizer is an offline tool that converts a model into IR and optimizes before the inference takes place. -The scheme below illustrates the typical workflow for deploying a trained deep learning model: +The scheme below illustrates the typical workflow for deploying a trained deep learning model: ![](img/BASIC_FLOW_MO_simplified.svg) -The IR is a pair of files describing the model: +The IR is a pair of files describing the model: * .xml - Describes the network topology * .bin - Contains the weights and biases binary data. +> **NOTE**: The generated IR can be additionally optimized for inference by [Post-training Optimization tool](../../tools/pot/README.md) +> that applies post-training quantization methods. + > **TIP**: You also can work with the Model Optimizer inside the OpenVINO™ [Deep Learning Workbench](https://docs.openvino.ai/latest/workbench_docs_Workbench_DG_Introduction.html) (DL Workbench). > [DL Workbench](https://docs.openvino.ai/latest/workbench_docs_Workbench_DG_Introduction.html) is a web-based graphical environment that enables you to optimize, fine-tune, analyze, visualize, and compare performance of deep learning models. 
-## Install Model Optimizer Pre-Requisites - -Before running the Model Optimizer, you must install the Model Optimizer pre-requisites for the framework that was used to train the model. - -@sphinxdirective -.. tab:: Using configuration scripts - - .. tab:: Linux - - .. tab:: All frameworks - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - ./install_prerequisites.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - ./install_prerequisites.shs - - .. tab:: Caffe - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisitess - install_prerequisites_caffe.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_caffe.shs - - .. tab:: Tensorflow 1.x - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_tf.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_tf.sh - - .. tab:: Tensorflow 2.x - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_tf2.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_tf2.sh - - .. 
tab:: MXNet - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_mxnet.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_mxnet.sh - - .. tab:: ONNX - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_onnx.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_onnx.sh - - .. tab:: Kaldi - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_kaldi.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_kaldi.sh - - .. tab:: Windows - - .. tab:: All frameworks - - .. tab:: Install globally - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites\ - install_prerequisites.bat - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - install_prerequisites.bat - - .. tab:: Caffe - - .. tab:: Install globally - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites\ - install_prerequisites_caffe.bat - - .. tab:: Install to virtualenv - - .. 
code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - install_prerequisites_caffe.bat - - .. tab:: Tensorflow 1.x - - .. tab:: Install globally - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites\ - install_prerequisites_tf.bat - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - install_prerequisites_tf.bat - - .. tab:: Tensorflow 2.x - - .. tab:: Install globally - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites\ - install_prerequisites_tf2.bat - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - install_prerequisites_tf2.bat - - .. tab:: MXNet - - .. tab:: Install globally - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites\ - install_prerequisites_mxnet.bat - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - install_prerequisites_mxnet.bat - - .. tab:: ONNX - - .. tab:: Install globally - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites\ - install_prerequisites_onnx.bat - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - install_prerequisites_onnx.bat - - .. tab:: Kaldi - - .. tab:: Install globally - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites\ - install_prerequisites_kaldi.bat - - .. 
tab:: Install to virtualenv - - .. code-block:: sh - - cd \deployment_tools\model_optimizer\install_prerequisites - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - install_prerequisites_kaldi.bat - - .. tab:: macOS - - .. tab:: All frameworks - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - ./install_prerequisites.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - ./install_prerequisites.shs - - .. tab:: Caffe - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisitess - install_prerequisites_caffe.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_caffe.shs - - .. tab:: Tensorflow 1.x - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_tf.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_tf.sh - - .. tab:: Tensorflow 2.x - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_tf2.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_tf2.sh - - .. tab:: MXNet - - .. 
tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_mxnet.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_mxnet.sh - - .. tab:: ONNX - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_onnx.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_onnx.sh - - .. tab:: Kaldi - - .. tab:: Install globally - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - install_prerequisites_kaldi.sh - - .. tab:: Install to virtualenv - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/install_prerequisites - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate # sh, bash, ksh, or zsh - install_prerequisites_kaldi.sh - -.. tab:: Using manual configuration process - - .. tab:: Linux - - .. tab:: All frameworks - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements.txt - - .. tab:: Caffe - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_caffe.txt - - .. tab:: Tensorflow 1.x - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_tf.txt - - .. tab:: Tensorflow 2.x - - .. 
code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_tf2.txt - - .. tab:: MXNet - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_mxnet.txt - - .. tab:: ONNX - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_onnx.txt - - .. tab:: Kaldi - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_kaldi.txt - - .. tab:: Windows - - .. tab:: All frameworks - - .. code-block:: sh - - cd \deployment_tools\model_optimizer - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - pip install -r requirements.txt - - .. tab:: Caffe - - .. code-block:: sh - - cd \deployment_tools\model_optimizer - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - pip install -r requirements_caffe.txt - - .. tab:: Tensorflow 1.x - - .. code-block:: sh - - cd \deployment_tools\model_optimizer - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - pip install -r requirements_tf.txt - - .. tab:: Tensorflow 2.x - - .. code-block:: sh - - cd \deployment_tools\model_optimizer - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - pip install -r requirements_tf2.txt - - .. tab:: MXNet - - .. code-block:: sh - - cd \deployment_tools\model_optimizer - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - pip install -r requirements_mxnet.txt - - .. tab:: ONNX - - .. 
code-block:: sh - - cd \deployment_tools\model_optimizer - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - pip install -r requirements_onnx.txt - - .. tab:: Kaldi - - .. code-block:: sh - - cd \deployment_tools\model_optimizer - virtualenv --system-site-packages -p python .\env - env\Scripts\activate.bat - pip install -r requirements_kaldi.txt - - .. tab:: macOS - - .. tab:: All frameworks - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements.txt - - .. tab:: Caffe - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_caffe.txt - - .. tab:: Tensorflow 1.x - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_tf.txt - - .. tab:: Tensorflow 2.x - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_tf2.txt - - .. tab:: MXNet - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_mxnet.txt - - .. tab:: ONNX - - .. code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_onnx.txt - - .. tab:: Kaldi - - .. 
code-block:: sh - - cd /deployment_tools/model_optimizer/ - virtualenv --system-site-packages -p python3 ./venv - source ./venv/bin/activate - pip3 install -r requirements_kaldi.txt - -@endsphinxdirective - ## Run Model Optimizer -To convert the model to the Intermediate Representation (IR), run Model Optimizer: +To convert the model to IR, run Model Optimizer: ```sh -mo --input_model INPUT_MODEL --output_dir +mo --input_model INPUT_MODEL ``` -You need to have have write permissions for an output directory. +If out-of-the-box conversion (only the `--input_model` parameter is specified) does not succeed, +try to use the parameters for overriding input shapes and cutting the model, mentioned below. -> **NOTE**: Some models require using additional arguments to specify conversion parameters, such as `--input_shape`, `--scale`, `--scale_values`, `--mean_values`, `--mean_file`. To learn about when you need to use these parameters, refer to [Converting a Model to Intermediate Representation (IR)](prepare_model/convert_model/Converting_Model.md). +To override original input shapes for model conversion, Model Optimizer provides two parameters: `--input` and `--input_shape`. +For more information about these parameters, refer to [Setting Input Shapes](prepare_model/convert_model/Converting_Model.md). 
-To adjust the conversion process, you may use general parameters defined in the [Converting a Model to Intermediate Representation (IR)](prepare_model/convert_model/Converting_Model.md) and -framework-specific parameters for: -* [Caffe](prepare_model/convert_model/Convert_Model_From_Caffe.md) -* [TensorFlow](prepare_model/convert_model/Convert_Model_From_TensorFlow.md) -* [MXNet](prepare_model/convert_model/Convert_Model_From_MxNet.md) -* [ONNX](prepare_model/convert_model/Convert_Model_From_ONNX.md) -* [Kaldi](prepare_model/convert_model/Convert_Model_From_Kaldi.md) +To cut off unwanted parts of a model, such as unsupported operations and training sub-graphs, +the `--input` and `--output` parameters can be used, defining new inputs and outputs of the converted model. +For a more detailed description, refer to [Cutting Off Parts of a Model](prepare_model/convert_model/Cutting_Model.md). -## Videos +Also, you can insert additional input pre-processing sub-graphs into the converted model using +the `--mean_values`, `--scale_values`, `--layout`, and other parameters described +in [Embedding Preprocessing Computation](prepare_model/Additional_Optimizations.md). -@sphinxdirective +Model Optimizer's compression parameter `--data_type` allows you to generate IR of the `FP16` data type. For more details, +please refer to [Compression of a Model to FP16](prepare_model/FP16_Compression.md). -.. list-table:: +To get the full list of conversion parameters available in Model Optimizer, run the following command: - * - .. raw:: html +```sh +mo --help +``` - - - - .. raw:: html +## Examples of CLI Commands - +Below is a list of separate examples for different frameworks and Model Optimizer parameters. - - .. raw:: html +1. Launch Model Optimizer for a TensorFlow MobileNet model in the binary protobuf format. +```sh +mo --input_model MobileNet.pb +``` +Launch Model Optimizer for a TensorFlow BERT model in the SavedModel format, with three inputs. 
Explicitly specify input shapes +where the batch size and the sequence length equal 2 and 30 respectively. +```sh +mo --saved_model_dir BERT --input mask,word_ids,type_ids --input_shape [2,30],[2,30],[2,30] +``` +For more information on TensorFlow model conversion, +refer to [Converting a TensorFlow Model](prepare_model/convert_model/Convert_Model_From_TensorFlow.md). - +2. Launch Model Optimizer for an ONNX OCR model and explicitly specify new output. +```sh +mo --input_model ocr.onnx --output probabilities +``` +For more information on ONNX model conversion, +please refer to [Converting an ONNX Model](prepare_model/convert_model/Convert_Model_From_ONNX.md). +Note that PyTorch models must be exported to the ONNX format before its conversion into IR. +More details can be found in [Converting a PyTorch Model](prepare_model/convert_model/Convert_Model_From_PyTorch.md). - * - **Model Optimizer Concept.** - - **Model Optimizer Basic Operation.** - - **Choosing the Right Precision.** +3. Launch Model Optimizer for a PaddlePaddle UNet model and apply mean-scale normalization to the input. +```sh +mo --input_model unet.pdmodel --mean_values [123,117,104] --scale 255 +``` +For more information on PaddlePaddle model conversion, please refer to +[Converting a PaddlePaddle Model](prepare_model/convert_model/Convert_Model_From_Paddle.md). - * - Duration: 3:56 - - Duration: 2:57 - - Duration: 4:18 +4. Launch Model Optimizer for an MXNet SSD Inception V3 model and specify first-channel layout for the input. +```sh +mo --input_model ssd_inception_v3-0000.params --layout NCHW +``` +For more information on MXNet models conversion, please refer to [Converting an MXNet Model](prepare_model/convert_model/Convert_Model_From_MxNet.md). -@endsphinxdirective +5. Launch Model Optimizer for a Caffe AlexNet model with input channels in the RGB format, which needs to be reversed. 
+```sh +mo --input_model alexnet.caffemodel --reverse_input_channels +``` +For more information on Caffe model conversion, please refer to [Converting a Caffe Model](prepare_model/convert_model/Convert_Model_From_Caffe.md). + +6. Launch Model Optimizer for a Kaldi LibriSpeech nnet2 model. +```sh +mo --input_model librispeech_nnet2.mdl --input_shape [1,140] +``` +For more information on Kaldi model conversion, +refer to [Converting a Kaldi Model](prepare_model/convert_model/Convert_Model_From_Kaldi.md). + +To get conversion recipes for specific TensorFlow, ONNX, PyTorch, MXNet, and Kaldi models, +refer to [Model Conversion Tutorials](prepare_model/convert_model/Convert_Model_Tutorials.md). diff --git a/docs/MO_DG/Known_Issues_Limitations.md b/docs/MO_DG/Known_Issues_Limitations.md deleted file mode 100644 index ec8897d06c6..00000000000 --- a/docs/MO_DG/Known_Issues_Limitations.md +++ /dev/null @@ -1,47 +0,0 @@ -# Known Issues and Limitations in the Model Optimizer {#openvino_docs_MO_DG_Known_Issues_Limitations} - -## Model Optimizer for TensorFlow* should be run on Intel® hardware that supports the AVX instruction set - -TensorFlow* provides only prebuilt binaries with AVX instructions enabled. When you're configuring the Model Optimizer by running the `install_prerequisites` or `install_prerequisites_tf` scripts, they download only those ones, which are not supported on hardware such as Intel® Pentium® processor N4200/5, N3350/5, N3450/5 (formerly known as Apollo Lake). - -To run the Model Optimizer on this hardware, you should compile TensorFlow binaries from source as described at the [TensorFlow website](https://www.tensorflow.org/install/source). - -Another option is to run the Model Optimizer to generate an IR on hardware that supports AVX to and then perform inference on hardware without AVX. 
- - -## Multiple OpenMP Loadings - -If the application uses the Inference Engine with third-party components that depend on Intel OpenMP, multiple loadings of the libiomp library may occur and cause OpenMP runtime initialization conflicts. This may happen, for example, if the application uses Intel® Math Kernel Library (Intel® MKL) through the “Single Dynamic Library” (libmkl_rt.so) mechanism and calls Intel MKL after loading the Inference Engine plugin. -The error log looks as follows: -```sh -OMP: Error #15: Initializing libiomp5.so, but found libiomp5.so already initialized. -OMP: Hint: This means that multiple copies of the OpenMP runtime have been linked into the program. That is dangerous, since it can degrade performance or cause incorrect results. The best thing to do is to ensure that only a single OpenMP runtime is linked into the process, e.g. by avoiding static linking of the OpenMP runtime in any library. As an unsafe, unsupported, undocumented workaround you can set the environment variable KMP_DUPLICATE_LIB_OK=TRUE to allow the program to continue to execute, but that may cause crashes or silently produce incorrect results. For more information, please see http://www.intel.com/software/products/support/. -``` - -Possible workarounds: - -* Preload the OpenMP runtime using the LD_PRELOAD variable: - ```sh - LD_PRELOAD= ``` - This eliminates multiple loadings of libiomp, and makes all the components use this specific version of OpenMP. - -* Alternatively, you can set KMP_DUPLICATE_LIB_OK=TRUE. However, performance degradation or incorrect results may occur in this case. - - -## Old proto compiler breaks protobuf library - -With python protobuf library version 3.5.1 the following incompatibility can happen. 
-The known case is for Cent OS 7.4 - -The error log looks as follows: - -```sh -File "../lib64/python3.5/site-packages/google/protobuf/descriptor.py", line 829, in _new_ -return _message.default_pool.AddSerializedFile(serialized_pb) -TypeError: expected bytes, str found -``` - -Possible workaround is to upgrade default protobuf compiler (libprotoc 2.5.0) to newer version, for example -libprotoc 2.6.1. - -[protobuf_issue]: https://github.com/google/protobuf/issues/4272 diff --git a/docs/MO_DG/img/BASIC_FLOW_MO_simplified.svg b/docs/MO_DG/img/BASIC_FLOW_MO_simplified.svg index 49ccbae9100..c2171e6abea 100644 --- a/docs/MO_DG/img/BASIC_FLOW_MO_simplified.svg +++ b/docs/MO_DG/img/BASIC_FLOW_MO_simplified.svg @@ -1,19 +1,187 @@ - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/MO_DG/prepare_model/Additional_Optimizations.md b/docs/MO_DG/prepare_model/Additional_Optimizations.md index 4a927563b6c..75f3c29744c 100644 --- a/docs/MO_DG/prepare_model/Additional_Optimizations.md +++ b/docs/MO_DG/prepare_model/Additional_Optimizations.md @@ -1,15 +1,101 @@ -# Optimize Preprocessing Computation{#openvino_docs_MO_DG_Additional_Optimization_Use_Cases} +# Embedding Preprocessing Computation {#openvino_docs_MO_DG_Additional_Optimization_Use_Cases} -Model Optimizer performs preprocessing to a model. It is possible to optimize this step and improve first inference time, to do that, follow the tips bellow: +Input data for inference can be different from the training dataset and requires additional preprocessing before inference. +To accelerate the whole pipeline including preprocessing and inference, Model Optimizer provides special parameters such as `--mean_values`, +`--scale_values`, `--reverse_input_channels`, and `--layout`. Based on these parameters, Model Optimizer generates IR with additionally +inserted sub-graph that performs the defined preprocessing. 
This preprocessing block can perform mean-scale normalization of input data, +reverting data along channel dimension, and changing the data layout. For more details about these parameters, refer to the paragraphs below. +The same functionality is also available in runtime, please refer to [Overview of Preprocessing API](../../OV_Runtime_UG/preprocessing_overview.md) +for more information. -- **Image mean/scale parameters**
- Make sure to use the input image mean/scale parameters (`--scale` and `–mean_values`) with the Model Optimizer when you need pre-processing. It allows the tool to bake the pre-processing into the IR to get accelerated by the Inference Engine. +## When to Specify Layout -- **RGB vs. BGR inputs**
- If, for example, your network assumes the RGB inputs, the Model Optimizer can swap the channels in the first convolution using the `--reverse_input_channels` command line option, so you do not need to convert your inputs to RGB every time you get the BGR image, for example, from OpenCV*. +You may need to set input layouts, as it is required by some preprocessing, for example, setting a batch, +applying mean or scales, and reversing input channels (BGR<->RGB). -- **Larger batch size**
- Notice that the devices like GPU are doing better with larger batch size. While it is possible to set the batch size in the runtime using the Inference Engine [ShapeInference feature](../../OV_Runtime_UG/ShapeInference.md). +Layout defines the meaning of dimensions in shape and can be specified for both inputs and outputs. +For the layout syntax, check the [Layout API overview](../../OV_Runtime_UG/layout_overview.md). +To specify the layout, you can use `--layout` option followed by the layout value. -- **Resulting IR precision**
-The resulting IR precision, for instance, `FP16` or `FP32`, directly affects performance. As CPU now supports `FP16` (while internally upscaling to `FP32` anyway) and because this is the best precision for a GPU target, you may want to always convert models to `FP16`. Notice that this is the only precision that Intel® Movidius™ Myriad™ 2 and Intel® Myriad™ X VPUs support. +For example, for Tensorflow\* `nasnet_large` model that was exported to ONNX format and thus has input with `NHWC` layout: + +``` +mo --input_model tf_nasnet_large.onnx --layout nhwc +``` + +Additionally, if a model has more than one input or needs both input and output layouts specified, +you need to provide the name of each input or output to which you apply the layout. + +For example, for ONNX\* `Yolo v3 Tiny` model that has first input `input_1` in `NCHW` layout and second input `image_shape` +with 2 dimensions: batch and size of the image which can be expressed as `N?` layout: + +``` +mo --input_model yolov3-tiny.onnx --layout input_1(nchw),image_shape(n?) +``` + +## How to Change Layout of a Model Inputs and Outputs + +Changing the model layout may be necessary if it differs from the one presented by input data. +To change the layout, you can use either `--layout` or `--source_layout` with `--target_layout`. + +For example, for the same `nasnet_large` that were mentioned previously we may want to provide data in `NCHW` layout: + +``` +mo --input_model tf_nasnet_large.onnx --source_layout nhwc --target_layout nchw +mo --input_model tf_nasnet_large.onnx --layout "nhwc->nchw" +``` + +Again, if a model has more than one input or needs both input and output layouts specified, you need to provide the name of each input or output to which you apply the layout. 
+ +For example, to provide data in the `NHWC` layout for the `Yolo v3 Tiny` model mentioned earlier: + +``` +mo --input_model yolov3-tiny.onnx --source_layout "input_1(nchw),image_shape(n?)" --target_layout "input_1(nhwc)" +mo --input_model yolov3-tiny.onnx --layout "input_1(nchw->nhwc),image_shape(n?)" +``` + +## When to Specify Mean and Scale Values +Usually neural network models are trained with the normalized input data. This means that the input data values are converted to be in a specific range, +for example, `[0, 1]` or `[-1, 1]`. Sometimes the mean values (mean images) are subtracted from the input data values as part of the pre-processing. +There are two cases of how the input data pre-processing is implemented. + * The input pre-processing operations are a part of a model. In this case, the application does not pre-process the input data as a separate step: everything is embedded into the model itself. + * The input pre-processing operations are not a part of a model and the pre-processing is performed within the application which feeds the model with input data. + +In the first case, the Model Optimizer generates the IR with required pre-processing operations and no `mean` and `scale` parameters are required. + +In the second case, information about mean/scale values should be provided to the Model Optimizer to embed it to the generated IR. +Model Optimizer provides command-line parameters to specify the values: `--mean_values`, `--scale_values`, `--scale`. +Using these parameters, Model Optimizer embeds the corresponding preprocessing block for mean-value normalization of the input data +and optimizes this block so that the preprocessing takes negligible time for inference. + +For example, run the Model Optimizer for the PaddlePaddle* UNet model and apply mean-scale normalization to the input data. 
+ +```sh +mo --input_model unet.pdmodel --mean_values [123,117,104] --scale 255 +``` + +## When to Reverse Input Channels +Sometimes input images for your application can be of the RGB (BGR) format and the model is trained on images of the BGR (RGB) format, +the opposite color channel order. In this case, it is important to preprocess the input images by reverting the color channels before inference. +To embed this preprocessing step into IR, Model Optimizer provides the `--reverse_input_channels` command-line parameter to shuffle the color channels. + +The `--reverse_input_channels` parameter applies to an input of the model in two cases. + * Only one dimension in the input shape has a size equal to 3. + * One dimension has an undefined size and is marked as `C` channel using `layout` parameters. + +Using the `--reverse_input_channels` parameter, Model Optimizer embeds the corresponding preprocessing block for reverting +the input data along channel dimension and optimizes this block so that the preprocessing takes negligible time for inference. + +For example, launch the Model Optimizer for the TensorFlow* AlexNet model and embed `reverse_input_channel` preprocessing block into IR. + +```sh +mo --input_model alexnet.pb --reverse_input_channels +``` + +> **NOTE**: If both mean and scale values are specified, the mean is subtracted first and then the scale is applied regardless of the order of options +in the command line. Input values are *divided* by the scale value(s). If also `--reverse_input_channels` option is used, the `reverse_input_channels` +will be applied first, then `mean` and after that `scale`. The data flow in the model looks as follows: +`Parameter -> ReverseInputChannels -> Mean apply-> Scale apply -> the original body of the model`. 
+ +## See Also +* [Overview of Preprocessing API](../../OV_Runtime_UG/preprocessing_overview.md) diff --git a/docs/MO_DG/prepare_model/Default_Model_Optimizer_Optimizations.md b/docs/MO_DG/prepare_model/Default_Model_Optimizer_Optimizations.md deleted file mode 100644 index 6b4f1826384..00000000000 --- a/docs/MO_DG/prepare_model/Default_Model_Optimizer_Optimizations.md +++ /dev/null @@ -1,11 +0,0 @@ -# Default Model Optimizer Optimizations {#openvino_docs_MO_DG_Default_Model_Optimizer_Optimizations} - -Model Optimizer not only converts a model to IR format, but also performs a number of optimizations. For example, certain primitives like linear operations (BatchNorm and ScaleShift), are automatically fused into convolutions. Generally, these layers should not be manifested in the resulting IR: - -![](../img/optimizations/resnet_269.png) - -The picture above shows Caffe\* Resnet269\* topology. The left model is the original model, and the one on the right (after conversion) is the resulting model that the Model Optimizer produces, with BatchNorm and ScaleShift layers fused into the convolution weights rather than constituting separate layers. - -If you still see these operations, inspect the Model Optimizer output carefully while searching for warnings, such as on the tool being unable to fuse. For example, non-linear operations (like activations) in between convolutions and linear operations might prevent the fusing. If performance is of concern, try to change (and potentially re-train) the topology. Refer to the [Model Optimizer Guide](Model_Optimization_Techniques.md) for more optimizations. - -Notice that the activation (`_relu`) is not touched by the Model Optimizer, and while it can be merged into convolution as well, this is rather a device-specific optimization, covered by Inference Engine during the model loading time. 
You are encouraged to inspect performance counters from plugins that should indicate that these particular layers are not executed (“Optimized out”). For more information, refer to Internal Inference Performance Counters. diff --git a/docs/MO_DG/prepare_model/FP16_Compression.md b/docs/MO_DG/prepare_model/FP16_Compression.md new file mode 100644 index 00000000000..41c905b7202 --- /dev/null +++ b/docs/MO_DG/prepare_model/FP16_Compression.md @@ -0,0 +1,20 @@ +# Compression of a Model to FP16 {#openvino_docs_MO_DG_FP16_Compression} + +Model Optimizer can convert all floating-point weights to `FP16` data type. The resulting IR is called +compressed `FP16` model. + +To compress the model, use the `--data_type` option: + +``` + mo --input_model INPUT_MODEL --data_type FP16 +``` + +> **NOTE**: Using `--data_type FP32` will give no result and will not force `FP32` +> precision in the model. If the model was `FP16` it will have `FP16` precision in IR as well. + +The resulting model will occupy about half as much space in the file system, but it may have some accuracy drop, +although for the majority of models accuracy degradation is negligible. For details on how plugins handle +compressed `FP16` models refer to [Working with devices](../../OV_Runtime_UG/supported_plugins/Device_Plugins.md) page. + +> **NOTE**: `FP16` compression is sometimes used as initial step for `INT8` quantization, please refer to +> [Post-Training Optimization tool](../../../tools/pot/README.md) for more information about that. diff --git a/docs/MO_DG/prepare_model/Getting_performance_numbers.md b/docs/MO_DG/prepare_model/Getting_performance_numbers.md index 1d11be89064..be253fc5709 100644 --- a/docs/MO_DG/prepare_model/Getting_performance_numbers.md +++ b/docs/MO_DG/prepare_model/Getting_performance_numbers.md @@ -3,28 +3,25 @@ ## Tip 1. Measure the Proper Set of Operations -When evaluating performance of your model with the Inference Engine, you must measure the proper set of operations. 
To do so, consider the following tips: +When evaluating performance of your model with the OpenVINO Runtime, you must measure the proper set of operations. To do so, consider the following tips: - Avoid including one-time costs like model loading. -- Track separately the operations that happen outside the Inference Engine, like video decoding. +- Track separately the operations that happen outside the OpenVINO Runtime, like video decoding. -> **NOTE**: Some image pre-processing can be baked into the IR and accelerated. For more information, refer to [Model Optimizer Knobs Related to Performance](Additional_Optimizations.md) +> **NOTE**: Some image pre-processing can be baked into the IR and accelerated accordingly. For more information, refer to [Embedding the Preprocessing](Additional_Optimizations.md). Also consider [_runtime_ preprocessing optimizations](../../optimization_guide/dldt_deployment_optimization_common). ## Tip 2. Getting Credible Performance Numbers You need to build your performance conclusions on reproducible data. Do the performance measurements with a large number of invocations of the same routine. Since the first iteration is almost always significantly slower than the subsequent ones, you can use an aggregated value for the execution time for final projections: - If the warm-up run does not help or execution time still varies, you can try running a large number of iterations and then average or find a mean of the results. -- For time values that range too much, use geomean. +- For time values that range too much, consider geomean. +- Beware of the throttling and other power oddities. A device can exist in one of several different power states. When optimizing your model, for better performance data reproducibility consider fixing the device frequency. However the end to end (application) benchmarking should be also performed under real operational conditions. 
-Refer to the [Inference Engine Samples](../../OV_Runtime_UG/Samples_Overview.md) for code examples for the performance measurements. Almost every sample, except interactive demos, has a `-ni` option to specify the number of iterations. +## Tip 3. Measure Reference Performance Numbers with OpenVINO's benchmark_app -## Getting performance numbers using OpenVINO tool - -To get performance numbers use our Benchmark app. - -[Benchmark App](../../../samples/cpp/benchmark_app/README.md) sample is the best performance reference. +To get performance numbers, use the dedicated [Benchmark App](../../../samples/cpp/benchmark_app/README.md) sample which is the best way to produce the performance reference. It has a lot of device-specific knobs, but the primary usage is as simple as: ```bash $ ./benchmark_app –d GPU –m -i @@ -36,35 +33,25 @@ $ ./benchmark_app –d CPU –m -i ``` to execute on the CPU instead. -For example, for the CPU throughput mode from the previous section, you can play with number of streams (`-nstreams` command-line param). -Try different values of the `-nstreams` argument from `1` to a number of CPU cores and find one that provides the best performance. For example, on a 8-core CPU, compare the `-nstreams 1` (which is a latency-oriented scenario) to the `2`, `4` and `8` streams. Notice that `benchmark_app` automatically queries/creates/runs number of requests required to saturate the given number of streams. - -Finally, notice that when you don't specify number of streams with `-nstreams`, "AUTO" value for the streams is used, e.g. for the CPU this is [CPU_THROUGHPUT_AUTO](../../OV_Runtime_UG/supported_plugins/CPU.md). You can spot the actual value behind "AUTO" for your machine in the application output. 
-Notice that the "AUTO" number is not necessarily most optimal, so it is generally recommended to play either with the benchmark_app's "-nstreams" as described above, or via [new Workbench tool](@ref workbench_docs_Workbench_DG_Introduction).This allows you to simplify the app-logic, as you don't need to combine multiple inputs into a batch to achieve good CPU performance. -Instead, it is possible to keep a separate infer request per camera or another source of input and process the requests in parallel using Async API. +Each of the [OpenVINO supported devices](../../OV_Runtime_UG/supported_plugins/Supported_Devices.md) offers performance settings that have command-line equivalents in the [Benchmark App](../../../samples/cpp/benchmark_app/README.md). +While these settings provide really low-level control and allow you to leverage the optimal model performance on the _specific_ device, we suggest always starting the performance evaluation with the [OpenVINO High-Level Performance Hints](../../OV_Runtime_UG/performance_hints.md) first: + - benchmark_app **-hint tput** -d 'device' -m 'path to your model' + - benchmark_app **-hint latency** -d 'device' -m 'path to your model' ## Comparing Performance with Native/Framework Code -When comparing the Inference Engine performance with the framework or another reference code, make sure that both versions are as similar as possible: +When comparing the OpenVINO Runtime performance with the framework or another reference code, make sure that both versions are as similar as possible: -- Wrap exactly the inference execution (refer to the [Inference Engine Samples](../../OV_Runtime_UG/Samples_Overview.md) for examples). +- Wrap exactly the inference execution (refer to the [Benchmark App](../../../samples/cpp/benchmark_app/README.md) for examples). - Do not include model loading time. -- Ensure the inputs are identical for the Inference Engine and the framework. 
For example, Caffe\* allows to auto-populate the input with random values. Notice that it might give different performance than on real images. -- Similarly, for correct performance comparison, make sure the access pattern, for example, input layouts, is optimal for Inference Engine (currently, it is NCHW). -- Any user-side pre-processing should be tracked separately. -- Make sure to try the same environment settings that the framework developers recommend, for example, for TensorFlow*. In many cases, things that are more machine friendly, like respecting NUMA (see CPU Checklist), might work well for the Inference Engine as well. -- If applicable, use batching with the Inference Engine. -- If possible, demand the same accuracy. For example, TensorFlow allows `FP16` support, so when comparing to that, make sure to test the Inference Engine with the `FP16` as well. +- Ensure the inputs are identical for the OpenVINO Runtime and the framework. For example, beware of random values that can be used to populate the inputs. +- Consider [Image Pre-processing and Conversion](../../OV_Runtime_UG/preprocessing_overview.md), while any user-side pre-processing should be tracked separately. +- When applicable, leverage the [Dynamic Shapes support](../../OV_Runtime_UG/ov_dynamic_shapes.md) +- If possible, demand the same accuracy. For example, TensorFlow allows `FP16` execution, so when comparing to that, make sure to test the OpenVINO Runtime with the `FP16` as well. -## Using Tools - -Whether you are tuning for the first time or doing advanced performance optimization, you need a a tool that provides accurate insights. Intel® VTune™ Amplifier gives you the tool to mine it and interpret the profiling data. - -Alternatively, you can gather the raw profiling data that samples report, the second chapter provides example of how to interpret these. 
- -### Internal Inference Performance Counters - -Almost every sample (inspect command-line options for a specific sample with `-h`) supports a `-pc` command that outputs internal execution breakdown. Refer to the [samples code](../../OV_Runtime_UG/Samples_Overview.md) for the actual Inference Engine API behind that. +## Internal Inference Performance Counters and Execution Graphs +Further, finer-grained insights into inference performance breakdown can be achieved with device-specific performance counters and/or execution graphs. +Both [C++](../../../samples/cpp/benchmark_app/README.md) and [Python](../../../tools/benchmark_tool/README.md) versions of the `benchmark_app` support a `-pc` command-line parameter that outputs internal execution breakdown. Below is example of CPU plugin output for a network (since the device is CPU, the layers wall clock `realTime` and the `cpu` time are the same): @@ -76,58 +63,12 @@ fc6_nChw8c_nchw EXECUTED layerType: Reorder realTime: 20 out_fc6 EXECUTED layerType: Output realTime: 3 cpu: 3 execType: unknown relu5_9_x2 OPTIMIZED_OUT layerType: ReLU realTime: 0 cpu: 0 execType: undef ``` +This contains layers name (as seen in IR), layers type and execution statistics. Notice the `OPTIMIZED_OUT`, which indicates that the particular activation was fused into adjacent convolution. +Both benchmark_app versions also support the "exec_graph_path" command-line option that instructs OpenVINO to output the same per-layer execution statistics, but in the form of the plugin-specific [Netron-viewable](https://netron.app/) graph to the specified file. -This contains layers name (as seen in IR), layers type and execution statistics. Notice the `OPTIMIZED_OUT`, which indicates that the particular activation was fused into adjacent convolution. Also, the `unknown` stays for the Inference Engine specific CPU (helper) primitives that are not part of the Intel MKL-DNN. 
+Notice that on some devices, the execution graphs/counters may be pretty intrusive overhead-wise. +Also, especially when performance-debugging the [latency case](../../optimization_guide/dldt_deployment_optimization_latency.md) notice that the counters do not reflect the time spent in the plugin/device/driver/etc queues. If the sum of the counters is too different from the latency of an inference request, consider testing with fewer inference requests. For example, running a single [OpenVINO stream](../../optimization_guide/dldt_deployment_optimization_tput.md) with multiple requests would produce nearly identical counters as running a single inference request, yet the actual latency can be quite different. -Notice that there are some helper layers in the CPU execution breakdown, which were not presented in the original topology. These are automatically added by the plugin. For example, the `Reorder` re-packs the Intel MKL-DNN internal (blocked) layout to the regular plain NCHW (that the user expects as the output). As explained in the Few Device-Specific Tips, if your custom kernels introduces a lot of outstanding/expensive Reorders, consider blocked implementation for the kernels. +Finally, the performance statistics with both performance counters and execution graphs are averaged, so such data for the [dynamically-shaped inputs](../../OV_Runtime_UG/ov_dynamic_shapes.md) should be measured carefully (ideally by isolating the specific shape and executing multiple times in a loop, to gather reliable data). 
-Notice that in the heterogeneous cases, there will be additional information on which subgraph the statistics is about (the first subgraph is GPU, so its `cpu`/host time is really small compared to the actual `realTime`): - -``` -subgraph1: squeeze1x1 EXECUTED layerType: Convolution realTime: 227 cpu:3 execType: GPU -… -subgraph2: detection_out EXECUTED layerType: DetectionOutput realTime: 121 cpu:121 execType: unknown -… -``` - -As mentioned earlier, `unknown` here means CPU kernel with unknown (for example, not AVX2 or AVX512) acceleration path. -Since FPGA execution does not separate individual kernels, only bulk execution/data transfer statistics is available: - -``` -subgraph1: 1. input preprocessing (mean data/FPGA):EXECUTED layerType: preprocessing realTime: 129 cpu: 129 -subgraph1: 2. input transfer to DDR:EXECUTED layerType: realTime: 201 cpu: 0 -subgraph1: 3. FPGA execute time:EXECUTED layerType: realTime: 3808 cpu: 0 subgraph1: 4. output transfer from DDR:EXECUTED layerType: realTime: 55 cpu: 0 -subgraph1: 5. FPGA output postprocessing:EXECUTED layerType: realTime: 7 cpu: 7 -subgraph1: 6. softmax/copy: EXECUTED layerType: realTime: 2 cpu: 2 -subgraph2: out_prob: NOT_RUN layerType: Output realTime: 0 cpu: 0 -subgraph2: prob: EXECUTED layerType: SoftMax realTime: 10 cpu: 10 -Total time: 4212 microseconds -``` - -The `softmax/copy` is a glue layer that connects the FPGA subgraph to the CPU subgraph (and copies the data). - -### Intel® VTune™ Examples - -All major performance calls of the Inference Engine are instrumented with Instrumentation and Tracing Technology APIs. This allows viewing the Inference Engine calls on the Intel® VTune™ timelines and aggregations plus correlating them to the underlying APIs, like OpenCL. In turn, this enables careful per-layer execution breakdown. 
- -When choosing the Analysis type in Intel® VTune™ Amplifier, make sure to select the **Analyze user tasks, events, and counters** option: - -![](vtune_option.png) - -See the [corresponding section in the Intel® VTune™ Amplifier User's Guide](https://software.intel.com/en-us/vtune-amplifier-help-task-analysis) for details. - -Example of Inference Engine calls: - -- On the Intel VTune Amplifier timeline. - Notice that `Task_runNOThrow` is an Async API wrapper and it is executed in a different thread and triggers the Intel MKL-DNN execution: - - ![](vtune_timeline.png) - -- In the Intel VTune Amplifier **Top-down view**, grouped by the **Task Domain**. - Notice the `Task_runNoThrow` and `MKLDNN _INFER` that are bracketing the actual Intel MKL-DNN kernels execution: - - ![](vtune_topdown_view.jpg) - -Similarly, you can use any GPU analysis in the Intel VTune Amplifier and get general correlation with Inference Engine API as well as the execution breakdown for OpenCL kernels. - -Just like with regular native application, further drill down in the counters is possible, however, this is mostly useful for optimizing custom kernels. Finally, with the Intel VTune Amplifier, the profiling is not limited to your user-level code (see the [corresponding section in the Intel® VTune™ Amplifier User's Guide](https://software.intel.com/en-us/vtune-amplifier-help-analyze-performance)). +OpenVINO in general and individual plugins are heavily instrumented with Intel® instrumentation and tracing technology (ITT), so another option is to compile the OpenVINO from the source code with the ITT enabled and using tools like [Intel® VTune™ Profiler](https://software.intel.com/en-us/vtune) to get detailed inference performance breakdown and additional insights in the application-level performance on the timeline view. 
\ No newline at end of file diff --git a/docs/MO_DG/prepare_model/Model_Optimization_Techniques.md b/docs/MO_DG/prepare_model/Model_Optimization_Techniques.md deleted file mode 100644 index f2ae32a6924..00000000000 --- a/docs/MO_DG/prepare_model/Model_Optimization_Techniques.md +++ /dev/null @@ -1,65 +0,0 @@ -# Model Optimization Techniques {#openvino_docs_MO_DG_prepare_model_Model_Optimization_Techniques} - -Optimization offers methods to accelerate inference with the convolution neural networks (CNN) that do not require model retraining. - -* * * - -## Linear Operations Fusing - -Many convolution neural networks includes `BatchNormalization` and `ScaleShift` layers (for example, Resnet\*, Inception\*) that can be presented as a sequence of linear operations: additions and multiplications. For example ScaleShift layer can be presented as Mul → Add sequence. These layers can be fused into previous `Convolution` or `FullyConnected` layers, except when Convolution comes after an Add operation (due to Convolution paddings). - -### Usage - -In the Model Optimizer, this optimization is turned on by default. To disable it, you can pass `--disable_fusing` parameter to the Model Optimizer. - -### Optimization Description - -This optimization method consists of three stages: - -1. `BatchNormalization` and `ScaleShift` decomposition: in this stage, `BatchNormalization` layer is decomposed to `Mul → Add → Mul → Add` sequence, and `ScaleShift` layer is decomposed to `Mul → Add` layers sequence. - -2. **Linear operations merge**: in this stage, the `Mul` and `Add` operations are merged into a single `Mul → Add` instance. - For example, if there is a `BatchNormalization → ScaleShift` sequence in the topology, it is replaced with `Mul → Add` in the first stage. In the next stage, the latter is replaced with a `ScaleShift` layer if there is no available `Convolution` or `FullyConnected` layer to fuse into next. -3. 
**Linear operations fusion**: in this stage, the tool fuses `Mul` and `Add` operations to `Convolution` or `FullyConnected` layers. Notice that it searches for `Convolution` and `FullyConnected` layers both backward and forward in the graph (except for `Add` operation that cannot be fused to `Convolution` layer in forward direction). - -### Usage Examples - -The picture below shows the depicted part of Caffe\* Resnet269 topology where `BatchNorm` and `ScaleShift` layers will be fused to `Convolution` layers. - -![Caffe ResNet269 block before and after optimization generated with Netscope*](../img/optimizations/resnet_269.png) - -* * * - -## ResNet optimization (stride optimization) - -ResNet optimization is a specific optimization that applies to Caffe ResNet topologies such as ResNet50, ResNet101, ResNet152 and to ResNet-based topologies. This optimization is turned on by default, and can be disabled with the `--disable_resnet_optimization` key. - -### Optimization Description - -In the picture below, you can see the original and optimized parts of a Caffe ResNet50 model. The main idea of this optimization is to move the stride that is greater than 1 from Convolution layers with the kernel size = 1 to upper Convolution layers. In addition, the Model Optimizer adds a Pooling layer to align the input shape for a Eltwise layer, if it was changed during the optimization. - -![ResNet50 blocks (original and optimized) from Netscope*](../img/optimizations/resnet_optimization.png) - -In this example, the stride from the `res3a_branch1` and `res3a_branch2a` Convolution layers moves to the `res2c_branch2b` Convolution layer. In addition, to align the input shape for `res2c` Eltwise, the optimization inserts the Pooling layer with kernel size = 1 and stride = 2. - -* * * - -## Grouped Convolution Fusing - -Grouped convolution fusing is a specific optimization that applies for TensorFlow\* topologies. 
The main idea of this optimization is to combine convolutions results for the `Split` outputs and then recombine them using `Concat` operation in the same order as they were out from `Split`. - -![Split→Convolutions→Concat block from TensorBoard*](../img/optimizations/groups.png) - -* * * - -## Disable Fusing - -Model Optimizer allows to disable optimizations for specified nodes via `--finegrain_fusing ,,...` (regex is also supported). Using this key, you mark nodes that will noy be touched by any optimizations. - -### Examples of usage - -On the picture below you can see two visualized Intermediate Representations (IR) of TensorFlow InceptionV4 topology. -The first one is original IR that will be produced by the Model Optimizer. -The second one will be produced by the Model Optimizer with key `--finegrain_fusing InceptionV4/InceptionV4/Conv2d_1a_3x3/Conv2D`, where you can see that `Convolution` was not fused with `Mul1_3752` and `Mul1_4061/Fused_Mul_5096/FusedScaleShift_5987` operations. - -![TF InceptionV4 block without/with key --finegrain_fusing (from IR visualizer)](../img/optimizations/inception_v4.png) diff --git a/docs/MO_DG/prepare_model/Model_Optimizer_FAQ.md b/docs/MO_DG/prepare_model/Model_Optimizer_FAQ.md index 91106d0ee86..bdb183de70c 100644 --- a/docs/MO_DG/prepare_model/Model_Optimizer_FAQ.md +++ b/docs/MO_DG/prepare_model/Model_Optimizer_FAQ.md @@ -6,7 +6,7 @@ If your question is not covered by the topics below, use the [OpenVINO™ Su Internally, the Model Optimizer uses a protobuf library to parse and load Caffe\* models. This library requires a file grammar and a generated parser. For a Caffe fallback, the Model Optimizer uses a Caffe-generated parser for a Caffe-specific `.proto` file (which is usually located in the `src/caffe/proto` directory). So, if you have Caffe installed on your machine with Python* interface available, make sure that this is exactly the version of Caffe that was used to create the model. 
-If you just want to experiment with the Model Optimizer and test a Python extension for working with your custom +If you just want to experiment with the Model Optimizer and test a Python extension for working with your custom layers without building Caffe, add the layer description to the `caffe.proto` file and generate a parser for it. For example, to add the description of the `CustomReshape` layer, which is an artificial layer not present in any `caffe.proto` files: @@ -25,17 +25,17 @@ For example, to add the description of the `CustomReshape` layer, which is an ar optional BlobShape shape = 1; // we just use the same parameter type as some other Caffe layers } ``` - + 2. Generate a new parser: ```shell cd /openvino/tools/mo/front/caffe/proto python3 generate_caffe_pb2.py --input_proto /src/caffe/proto/caffe.proto ``` where `PATH_TO_CUSTOM_CAFFE` is the path to the root directory of custom Caffe\*. - + 3. Now, the Model Optimizer is able to load the model into memory and start working with your extensions if there are any. -However, because your model has custom layers, you must register your custom layers as custom. To learn more about it, refer to the section [Custom Layers in Model Optimizer](customize_model_optimizer/Customize_Model_Optimizer.md). +However, because your model has custom layers, you must register your custom layers as custom. To learn more about it, refer to the section [Custom Layers in Model Optimizer](customize_model_optimizer/Customize_Model_Optimizer.md). #### 2. How do I create a bare caffemodel, if I have only prototxt? @@ -48,8 +48,8 @@ net.save('/my_net.caffemodel') ``` #### 3. What does the message "[ ERROR ]: Unable to create ports for node with id" mean? -Most likely, the Model Optimizer does not know how to infer output shapes of some layers in the given topology. 
-To lessen the scope, compile the list of layers that are custom for the Model Optimizer: present in the topology, +Most likely, the Model Optimizer does not know how to infer output shapes of some layers in the given topology. +To lessen the scope, compile the list of layers that are custom for the Model Optimizer: present in the topology, absent in [list of supported layers](Supported_Frameworks_Layers.md) for the target framework. Then refer to available options in the corresponding section in [Custom Layers in Model Optimizer](customize_model_optimizer/Customize_Model_Optimizer.md). #### 4. What does the message "Input image of shape is larger than mean image from file" mean? @@ -100,7 +100,7 @@ message NetParameter { ``` So, the input layer of the provided model must be specified in one of the following styles: -* +* ```sh input: "data" input_shape @@ -111,8 +111,8 @@ input_shape dim: 227 } ``` - -* + +* ```sh input: "data" input_shape @@ -129,7 +129,7 @@ input_shape dim: 3 } ``` -* +* ```sh layer { @@ -146,7 +146,7 @@ layer input_param {shape: {dim: 1 dim: 3}} } ``` -* +* ```sh input: "data" input_dim: 1 @@ -158,7 +158,7 @@ However, if your model contains more than one input, the Model Optimizer is able #### 9. What does the message "Mean file for topologies with multiple inputs is not supported" mean? -Model Optimizer does not support mean file processing for topologies with more than one input. In this case, you need to perform preprocessing of the inputs for a generated Intermediate Representation in the Inference Engine to perform subtraction for every input of your multi-input model. +Model Optimizer does not support mean file processing for topologies with more than one input. 
In this case, you need to perform preprocessing of the inputs for a generated Intermediate Representation in the OpenVINO Runtime to perform subtraction for every input of your multi-input model, see [Overview of Preprocessing](../../OV_Runtime_UG/preprocessing_overview.md) for details. #### 10. What does the message "Cannot load or process mean file: value error" mean? @@ -214,7 +214,7 @@ One of the layers in the specified topology might not have inputs or values. Ple #### 24. What does the message "Part of the nodes was not translated to IE. Stopped" mean? -Some of the layers are not supported by the Inference Engine and cannot be translated to an Intermediate Representation. You can extend the Model Optimizer by allowing generation of new types of layers and implement these layers in the dedicated Inference Engine plugins. For more information, refer to the [Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md) and [Inference Engine Extensibility Mechanism](../../OV_Runtime_UG/Extensibility_DG/Intro.md) +Some of the operations are not supported by the OpenVINO Runtime and cannot be translated to an Intermediate Representation. You can extend the Model Optimizer by allowing generation of new types of operations and implement these operations in the dedicated OpenVINO plugins. For more information, refer to the [OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md) #### 25. What does the message "While creating an edge from .. to .. : node name is undefined in the graph. Check correctness of the input model" mean? @@ -252,7 +252,7 @@ Looks like you have provided only one shape for the placeholder, however there a #### 33. What does the message "The amount of input nodes for port is not equal to 1" mean? -This error occurs when the `SubgraphMatch.single_input_node` function is used for an input port that supplies more than one node in a sub-graph. 
The `single_input_node` function can be used only for ports that has a single consumer inside the matching sub-graph. When multiple nodes are connected to the port, use the `input_nodes` function or `node_by_pattern` function instead of `single_input_node`. Please, refer to [Sub-Graph Replacement in the Model Optimizer](customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md) for more details. +This error occurs when the `SubgraphMatch.single_input_node` function is used for an input port that supplies more than one node in a sub-graph. The `single_input_node` function can be used only for ports that has a single consumer inside the matching sub-graph. When multiple nodes are connected to the port, use the `input_nodes` function or `node_by_pattern` function instead of `single_input_node`. Please, refer to **Graph Transformation Extensions** section in the [Model Optimizer Extensibility](customize_model_optimizer/Customize_Model_Optimizer.md) documentation for more details. #### 34. What does the message "Output node for port has already been specified" mean? @@ -268,7 +268,7 @@ Model Optimizer tried to write an event file in the specified directory but fail #### 37. What does the message "There is no registered 'infer' function for node with op = .. . Please implement this function in the extensions" mean? -Most likely, you tried to extend Model Optimizer with a new primitive, but did not specify an infer function. For more information on extensions, see [Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md). +Most likely, you tried to extend Model Optimizer with a new primitive, but did not specify an infer function. For more information on extensions, see [OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md). #### 38. What does the message "Stopped shape/value propagation at node" mean? @@ -300,7 +300,7 @@ Most likely, there is a problem with the specified file for model. The file exis #### 45. 
What does the message "Found custom layer. Model Optimizer does not support this layer. Please, register it in CustomLayersMapping.xml or implement extension" mean? -This means that the layer `{layer_name}` is not supported in the Model Optimizer. You can find a list of all unsupported layers in the corresponding section. You should implement the extensions for this layer ([Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md)). +This means that the layer `{layer_name}` is not supported in the Model Optimizer. You can find a list of all unsupported layers in the corresponding section. You should implement the extensions for this layer ([OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md)). #### 46. What does the message "Custom replacement configuration file does not exist" mean? @@ -308,7 +308,7 @@ Path to the custom replacement configuration file was provided with the `--trans #### 47. What does the message "Extractors collection have case insensitive duplicates" mean? -When extending Model Optimizer with new primitives keep in mind that their names are case insensitive. Most likely, another operation with the same name is already defined. For more information, see [Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md). +When extending Model Optimizer with new primitives keep in mind that their names are case insensitive. Most likely, another operation with the same name is already defined. For more information, see [OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md). #### 48. What does the message "Input model name is not in an expected format, cannot extract iteration number" mean? @@ -340,7 +340,7 @@ Please, make sure that inputs are defined and have correct shapes. You can use ` #### 55. What does the message "Attempt to register of custom name for the second time as class. Note that custom names are case-insensitive" mean? 
-When extending Model Optimizer with new primitives keep in mind that their names are case insensitive. Most likely, another operation with the same name is already defined. For more information, see [Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md). +When extending Model Optimizer with new primitives keep in mind that their names are case insensitive. Most likely, another operation with the same name is already defined. For more information, see [OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md). #### 56. What does the message "Both --input_shape and --batch were provided. Please, provide only one of them" mean? @@ -350,7 +350,7 @@ You cannot specify the batch and the input shape at the same time. You should sp The specified input shape cannot be parsed. Please, define it in one of the following ways: -* +* ```shell mo --input_model .caffemodel --input_shape (1,3,227,227) ``` @@ -447,7 +447,7 @@ This message may appear when the `--data_type=FP16` command line option is used. #### 78. What does the message "The amount of nodes matched pattern ... is not equal to 1" mean? -This error occurs when the `SubgraphMatch.node_by_pattern` function is used with a pattern that does not uniquely identify a single node in a sub-graph. Try to extend the pattern string to make unambiguous match to a single sub-graph node. For more details, refer to [Sub-graph Replacement in the Model Optimizer](customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md). +This error occurs when the `SubgraphMatch.node_by_pattern` function is used with a pattern that does not uniquely identify a single node in a sub-graph. Try to extend the pattern string to make unambiguous match to a single sub-graph node. For more details, refer to **Graph Transformation Extensions** section in the [Model Optimizer Extensibility](customize_model_optimizer/Customize_Model_Optimizer.md) documentation. #### 79. What does the message "The topology contains no "input" layers" mean? 
@@ -459,18 +459,18 @@ You are using an unsupported Python\* version. Use only versions 3.4 - 3.6 for t #### 81. What does the message "Arguments --nd_prefix_name, --pretrained_model_name and --input_symbol should be provided. Please provide all or do not use any." mean? -This error occurs if you do not provide `--nd_prefix_name`, `--pretrained_model_name` and `--input_symbol` parameters. -Model Optimizer requires both `.params` and `.nd` model files to merge into the result file (`.params`). Topology +This error occurs if you do not provide `--nd_prefix_name`, `--pretrained_model_name` and `--input_symbol` parameters. +Model Optimizer requires both `.params` and `.nd` model files to merge into the result file (`.params`). Topology description (`.json` file) should be prepared (merged) in advance and provided with `--input_symbol` parameter. -If you add to your model additional layers and weights that are in `.nd` files, the Model Optimizer can build a model +If you add to your model additional layers and weights that are in `.nd` files, the Model Optimizer can build a model from one `.params` file and two additional `.nd` files (`*_args.nd`, `*_auxs.nd`). To do that, provide both CLI options or do not pass them if you want to convert an MXNet model without additional weights. For more information, refer to [Converting a MXNet* Model](convert_model/Convert_Model_From_MxNet.md). #### 82. What does the message "You should specify input for mean/scale values" mean? -In case when the model has multiple inputs and you want to provide mean/scale values, you need to pass those values for each input. More specifically, a number of passed values should be the same as the number of inputs of the model. +When the model has multiple inputs and you want to provide mean/scale values, you need to pass those values for each input. More specifically, the number of passed values should be the same as the number of inputs of the model. 
For more information, refer to [Converting a Model to Intermediate Representation](convert_model/Converting_Model.md). #### 83. What does the message "Input with name ... not found!" mean? @@ -490,16 +490,16 @@ For more information, refer to [Converting a MXNet* Model](convert_model/Convert #### 86. What does the message "Operation ... not supported. Please register it as custom op" mean? -Model Optimizer tried to load the model that contains some unsupported operations. +Model Optimizer tried to load the model that contains some unsupported operations. If you want to convert model that contains unsupported operations you need to prepare extension for all such operations. -For more information, refer to [Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md). +For more information, refer to [OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md). #### 87. What does the message "Can not register Op ... Please, call function 'register_caffe_python_extractor' with parameter 'name'" mean? This error appears if the class of implementation of op for Python Caffe layer could not be used by Model Optimizer. Python layers should be handled differently compared to ordinary Caffe layers. In particular, you need to call the function `register_caffe_python_extractor` and pass `name` as the second argument of the function. -The name should be the compilation of the layer name and the module name separated by a dot. +The name should be the compilation of the layer name and the module name separated by a dot. For example, your topology contains this layer with type `Python`: @@ -520,7 +520,7 @@ What you do first is implementing an extension for this layer in the Model Optim ``` class ProposalPythonExampleOp(Op): op = 'Proposal' - + def __init__(self, graph: nx.MultiDiGraph, attrs: dict): ... 
``` @@ -536,25 +536,25 @@ Op.excluded_classes.append(ProposalPythonExampleOp) Note that the first call register_caffe_python_extractor(ProposalPythonExampleOp, 'rpn.proposal_layer.ProposalLayer') registers extension of the layer in the Model Optimizer that will be found by the specific name (mandatory to join module name and layer name): rpn.proposal_layer.ProposalLayer. -The second call prevents Model Optimizer from using this extension as if it is an extension for +The second call prevents Model Optimizer from using this extension as if it is an extension for a layer with type `Proposal`. Otherwise, this layer can be chosen as an implementation of extension that can lead to potential issues. -For more information, refer to the [Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md). +For more information, refer to the [OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md). #### 88. What does the message "Model Optimizer is unable to calculate output shape of Memory node .." mean? -Model Optimizer supports only `Memory` layers, in which `input_memory` goes before `ScaleShift` or `FullyConnected` layer. +Model Optimizer supports only `Memory` layers, in which `input_memory` goes before `ScaleShift` or `FullyConnected` layer. This error message means that in your model the layer after input memory is not of type `ScaleShift` or `FullyConnected`. This is a known limitation. #### 89. What do the messages "File ... does not appear to be a Kaldi file (magic number does not match)", "Kaldi model should start with tag" mean? -These error messages mean that the Model Optimizer does not support your Kaldi\* model, because check sum of the model is not +These error messages mean that the Model Optimizer does not support your Kaldi\* model, because check sum of the model is not 16896 (the model should start with this number) or model file does not contain tag `` as a starting one. Double check that you provide a path to a true Kaldi model and try again. 
#### 90. What do the messages "Expect counts file to be one-line file." or "Expect counts file to contain list of integers" mean? -These messages mean that you passed the file counts containing not one line. The count file should start with +These messages mean that the counts file you passed contains more than one line. The counts file should start with `[` and end with `]`, and integer values should be separated by space between those signs. #### 91. What does the message "Model Optimizer is not able to read Kaldi model .." mean? @@ -570,10 +570,10 @@ file is not available or does not exist. Also refer to FAQ [#90](#question-90). #### 93. What does the message "For legacy MXNet models Model Optimizer does not support conversion of old MXNet models (trained with 1.0.0 version of MXNet and lower) with custom layers." mean? This message means that if you have model with custom layers and its json file has been generated with MXNet version -lower than 1.0.0, Model Optimizer does not support such topologies. If you want to convert it you have to rebuild -MXNet with unsupported layers or generate new json with MXNet version 1.0.0 and higher. Also you need to implement -Inference Engine extension for used custom layers. -For more information, refer to the [Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md). +lower than 1.0.0, Model Optimizer does not support such topologies. If you want to convert it you have to rebuild +MXNet with unsupported layers or generate new json with MXNet version 1.0.0 and higher. Also you need to implement +an OpenVINO extension for the custom layers used. +For more information, refer to the [OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md). #### 97. What does the message "Graph contains a cycle. Can not proceed .." mean? @@ -581,22 +581,22 @@ Model Optimizer supports only straightforward models without cycles. 
There are multiple ways to avoid cycles: -For Tensorflow: +For TensorFlow: * [Convert models, created with TensorFlow Object Detection API](convert_model/tf_specific/Convert_Object_Detection_API_Models.md) -For all frameworks: -1. [Replace cycle containing Sub-graph in Model Optimizer](customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md) -2. [Custom Layers Guide](../../HOWTO/Custom_Layers_Guide.md) +For all frameworks: +1. [Replace cycle containing Sub-graph in Model Optimizer](customize_model_optimizer/Customize_Model_Optimizer.md) +2. [OpenVINO™ Extensibility Mechanism](../../Extensibility_UG/Intro.md) or -* Edit network in original framework to exclude cycle. +* Edit the model in the original framework to exclude the cycle. #### 98. What does the message "Can not transpose attribute '..' with value .. for node '..' .." mean? This message means that model is not supported. It may be caused by using shapes larger than 4-D. There are two ways to avoid such message: -1. [Cut model part containing such layers in Model Optimizer](convert_model/Cutting_Model.md) +1. [Cutting Off Parts of a Model](convert_model/Cutting_Model.md) 2. Edit network in original framework to exclude such layers. 
@@ -632,10 +632,10 @@ Note that you might have conflicts between previously installed PyPI dependencie #### 105. What does the message "The IR preparation was executed by the legacy MO path. ..." mean? -For the models in ONNX* format, there are two available paths of IR conversion. -The old one is handled by the old Python* implementation, while the new one uses new C++ frontends. -Starting from the 2022.1 version, the default IR conversion path for ONNX models is processed using the new ONNX frontend. -Certain features, such as `--extensions` and `--transformations_config`, are not yet fully supported on the new frontends. -For `--extensions`, the new frontends support only paths to shared libraries (.dll and .so). For `--transformations_config`, they support JSON configurations with defined library fields. -Inputs freezing (enabled by `--freeze_placeholder_with_value` or `--input` arguments) is not supported on the new frontends. +For the models in ONNX* format, there are two available paths of IR conversion. +The old one is handled by the old Python* implementation, while the new one uses new C++ frontends. +Starting from the 2022.1 version, the default IR conversion path for ONNX models is processed using the new ONNX frontend. +Certain features, such as `--extensions` and `--transformations_config`, are not yet fully supported on the new frontends. +For `--extensions`, the new frontends support only paths to shared libraries (.dll and .so). For `--transformations_config`, they support JSON configurations with defined library fields. +Inputs freezing (enabled by `--freeze_placeholder_with_value` or `--input` arguments) is not supported on the new frontends. The IR conversion falls back to the old path if a user does not select any expected path of conversion explicitly (by `--use_new_frontend` or `--use_legacy_frontend` MO arguments) and unsupported pre-defined scenario is detected on the new frontend path. 
diff --git a/docs/MO_DG/prepare_model/Supported_Frameworks_Layers.md b/docs/MO_DG/prepare_model/Supported_Frameworks_Layers.md index b43910dd12c..a08b35ebed2 100644 --- a/docs/MO_DG/prepare_model/Supported_Frameworks_Layers.md +++ b/docs/MO_DG/prepare_model/Supported_Frameworks_Layers.md @@ -1,9 +1,9 @@ # Supported Framework Layers {#openvino_docs_MO_DG_prepare_model_Supported_Frameworks_Layers} -## Caffe\* Supported Layers +## Caffe Supported Layers -| Layer Name in Caffe\* | Limitations | +| Layer Name in Caffe | Limitations | |:---------- | :----------| | Axpy | | | BN | | @@ -47,10 +47,10 @@ | Tile | | -## MXNet\* Supported Symbols +## MXNet Supported Symbols -| Symbol Name in MXNet\*| Limitations| +| Symbol Name in MXNet| Limitations| | :----------| :----------| | _Plus | | | _contrib_arange_like | | @@ -119,7 +119,7 @@ | Concat | | | Convolution | | | Crop | "center_crop" = 1 is not supported | -| Custom | [Custom Layers in the Model Optimizer](customize_model_optimizer/Customize_Model_Optimizer.md) | +| Custom | [Custom Layers in Model Optimizer](customize_model_optimizer/Customize_Model_Optimizer.md) | | Deconvolution | | | DeformableConvolution | | | DeformablePSROIPooling | | @@ -149,12 +149,12 @@ | zeros_like | | -## TensorFlow\* Supported Operations +## TensorFlow Supported Operations -Some TensorFlow\* operations do not match to any Inference Engine layer, but are still supported by the Model Optimizer and can be used on constant propagation path. These layers are labeled 'Constant propagation' in the table. +Some TensorFlow operations do not match to any OpenVINO operation, but are still supported by the Model Optimizer and can be used on constant propagation path. These layers are labeled 'Constant propagation' in the table. 
-| Operation Name in TensorFlow\* | Limitations| +| Operation Name in TensorFlow | Limitations| | :----------| :----------| | Abs | | | Acosh | | @@ -348,10 +348,10 @@ Some TensorFlow\* operations do not match to any Inference Engine layer, but are | ZerosLike | | -## TensorFlow 2 Keras\* Supported Operations +## TensorFlow 2 Keras Supported Operations -| Operation Name in TensorFlow 2 Keras\* | Limitations| +| Operation Name in TensorFlow 2 Keras | Limitations| | :----------| :----------| | ActivityRegularization | | | Add | | @@ -431,10 +431,10 @@ Some TensorFlow\* operations do not match to any Inference Engine layer, but are | ZeroPadding2D | | | ZeroPadding3D | | -## Kaldi\* Supported Layers +## Kaldi Supported Layers -| Symbol Name in Kaldi\*| Limitations| +| Symbol Name in Kaldi| Limitations| | :----------| :----------| | addshift | | | affinecomponent | | @@ -478,154 +478,194 @@ Some TensorFlow\* operations do not match to any Inference Engine layer, but are | timeheightconvolutioncomponent | | -## ONNX\* Supported Operators +## ONNX Supported Operators +### Standard ONNX Operators -| Symbol Name in ONNX\*| Limitations| -| :----------| :----------| -| Abs | | -| Acos | | -| Acosh | | -| Add | | -| Affine | | -| And | | -| ArgMax | | -| ArgMin | | -| Asin | | -| Asinh | | -| Atan | | -| Atanh | | -| ATen | Supported only for the 'embedding_bag' operator | -| AveragePool | | -| BatchMatMul | | -| BatchNormalization | | -| Cast | | -| Ceil | | -| Clip | | -| Concat | | -| Constant | | -| ConstantFill | | -| ConstantOfShape | | -| Conv | | -| ConvTranspose | | -| Cos | | -| Cosh | | -| Crop | | -| CumSum | | -| DepthToSpace | | -| DequantizeLinear | | -| DetectionOutput (Intel experimental) | | -| Div | | -| Dropout | Not needed for inference | -| Elu | | -| Equal | | -| Erf | | -| Exp | | -| Expand | | -| ExperimentalDetectronDetectionOutput (Intel experimental) | | -| ExperimentalDetectronGenerateProposalsSingleImage (Intel experimental) | | -| 
ExperimentalDetectronGroupNorm (Intel experimental) | | -| ExperimentalDetectronPriorGridGenerator (Intel experimental) | | -| ExperimentalDetectronROIFeatureExtractor (Intel experimental) | | -| ExperimentalDetectronTopKROIs (Intel experimental) | | -| FakeQuantize (Intel experimental) | | -| Fill | | -| Flatten | | -| Floor | | -| GRU | | -| Gather | | -| GatherElements | Doesn't work with negative indices | -| GatherND | Doesn't work with negative indices | -| GatherTree | | -| Gemm | | -| GlobalAveragePool | | -| GlobalMaxPool | | -| Greater | | -| GreaterEqual | | -| HardSigmoid | | -| Identity | Not needed for inference | -| ImageScaler | | -| InstanceNormalization | | -| LRN | | -| LSTM | Peepholes are not supported | -| LeakyRelu | | -| Less | | -| LessEqual | | -| Log | | -| LogicalAnd | | -| LogicalOr | | -| LogSoftmax | | -| Loop | | -| LpNormalization | | -| MatMul | | -| Max | | -| MaxPool | | -| MeanVarianceNormalization | Reduction over the batch dimension is not supported, reduction over all dimensions except batch and channel ones is obligatory | -| Min | | -| Mul | | -| Neg | | -| NonMaxSuppression | | -| NonZero | | -| Not | | -| NotEqual | | -| OneHot | | -| Pad | | -| Pow | | -| PriorBox (Intel experimental) | | -| PriorBoxClustered | | -| QuantizeLinear | | -| RNN | | -| ROIAlign | | -| Range | | -| RandomUniform | Operation provides sequence from uniform distribution, but exact values won't match. | -| Reciprocal | | -| ReduceL1 | | -| ReduceL2 | | -| ReduceMax | | -| ReduceMean | | -| ReduceMin | | -| ReduceProd | | -| ReduceSum | | -| Relu | | -| Reshape | | -| Resize | Coordinate transformation mode `tf_crop_and_resize` is not supported, `nearest` mode is not supported for 5D+ inputs. | -| ReverseSequence | | -| Round | | -| Scatter | Supported if fuse-able to ScatterUpdate. MYRIAD only | -| ScatterND | | -| ScatterElements | Supported if fuse-able to ScatterUpdate. 
MYRIAD only | -| Select | | -| Shape | | -| Sigmoid | | -| Sign | | -| Sin | | -| Size | | -| Slice | | -| Softmax | | -| Softplus | | -| Softsign | | -| SpaceToDepth | | -| Split | | -| Sqrt | | -| Squeeze | The case when squeeze axis is not specified is not supported | -| Sub | | -| Sum | | -| Tan | | -| Tanh | | -| ThresholdedRelu | | -| TopK | | -| Transpose | | -| Unsqueeze | | -| Upsample | | -| Where | | -| Xor | | +| ONNX Operator Name | +| :----------| +| Abs | +| Acos | +| Acosh | +| And | +| ArgMin | +| ArgMax | +| Asin | +| Asinh | +| Atan | +| ATen | +| Atanh | +| AveragePool | +| BatchNormalization | +| BitShift | +| Cast | +| CastLike | +| Ceil | +| Clip | +| Concat | +| Constant | +| ConstantOfShape | +| Conv | +| ConvInteger | +| ConvTranspose | +| Compress | +| Cos | +| Cosh | +| ConstantFill | +| CumSum | +| DepthToSpace | +| DequantizeLinear | +| Div | +| Dropout | +| Einsum | +| Elu | +| Equal | +| Erf | +| Exp | +| Expand | +| EyeLike | +| Flatten | +| Floor | +| Gather | +| GatherElements | +| GatherND | +| Gemm | +| GlobalAveragePool | +| GlobalLpPool | +| GlobalMaxPool | +| Greater | +| GRU | +| Hardmax | +| HardSigmoid | +| HardSwish | +| Identity | +| If | +| ImageScaler | +| InstanceNormalization | +| LeakyRelu | +| Less | +| Log | +| LogSoftmax | +| Loop | +| LpNormalization | +| LRN | +| LSTM | +| MatMulInteger | +| MatMul | +| MaxPool | +| Max | +| Mean | +| MeanVarianceNormalization | +| Min | +| Mod | +| Mul | +| Neg | +| NonMaxSuppression | +| NonZero | +| Not | +| Or | +| OneHot | +| Pad | +| Pow | +| PRelu | +| QLinearConv | +| QLinearMatMul | +| QuantizeLinear | +| Range | +| RandomNormal | +| RandomNormalLike | +| RandomUniform | +| RandomUniformLike | +| Reciprocal | +| ReduceLogSum | +| ReduceLogSumExp | +| ReduceL1 | +| ReduceL2 | +| ReduceMax | +| ReduceMean | +| ReduceMin | +| ReduceProd | +| ReduceSum | +| ReduceSumSquare | +| Relu | +| Reshape | +| Resize | +| ReverseSequence | +| RNN | +| RoiAlign | +| Round | +| 
ScatterElements | +| ScatterND | +| Selu | +| Shape | +| Shrink | +| Sigmoid | +| Sign | +| Sin | +| Sinh | +| Size | +| Slice | +| Softmax | +| Softplus | +| Softsign | +| SpaceToDepth | +| Split | +| Sqrt | +| Squeeze | +| Sub | +| Sum | +| Tan | +| Tanh | +| ThresholdedRelu | +| Tile | +| TopK | +| Transpose | +| Unsqueeze | +| Where | +| Xor | +### Deprecated ONNX Operators (Supported) -## PaddlePaddle\* Supported Operators +| ONNX Operator Name | +| :----------| +| Affine | +| Crop | +| Scatter | +| Upsample | + +### Operators From the org.openvinotoolkit Domain + +| Custom ONNX Operator Name | +| :----------| +| DeformableConv2D | +| DetectionOutput | +| ExperimentalDetectronDetectionOutput | +| ExperimentalDetectronGenerateProposalsSingleImage | +| ExperimentalDetectronGroupNorm | +| ExperimentalDetectronPriorGridGenerator | +| ExperimentalDetectronROIFeatureExtractor | +| ExperimentalDetectronTopKROIs | +| FakeQuantize | +| GroupNorm | +| Normalize | +| PriorBox | +| PriorBoxClustered | +| Swish | + +### Operators From the com.microsoft Domain + +| Custom ONNX Operator Name | +| :----------| +| Attention | +| BiasGelu | +| EmbedLayerNormalization | +| SkipLayerNormalization | + +## PaddlePaddle Supported Operators paddlepaddle>=2.1 -| Operator Name in PaddlePaddle\*| Limitations| +| Operator Name in PaddlePaddle| Limitations| | :----------| :----------| | adpative_pool2d | 'NHWC' data_layout is not supported | | arg_max | 'int32' output data_type is not supported | diff --git a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Caffe.md b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Caffe.md index d8f2f906c40..549a625822d 100644 --- a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Caffe.md +++ b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Caffe.md @@ -1,60 +1,12 @@ # Converting a Caffe* Model {#openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Caffe} -@sphinxdirective - -.. 
_convert model caffe: - -@endsphinxdirective - -A summary of the steps for optimizing and deploying a model that was trained with Caffe\*: - -1. [Configure the Model Optimizer](../../Deep_Learning_Model_Optimizer_DevGuide.md) for Caffe\*. -2. [Convert a Caffe\* Model](#Convert_From_Caffe) to produce an optimized [Intermediate Representation (IR)](../../IR_and_opsets.md) of the model based on the trained network topology, weights, and biases values -3. Test the model in the Intermediate Representation format using the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in the target environment via provided Inference Engine [sample applications](../../../OV_Runtime_UG/Samples_Overview.md) -4. [Integrate](../../../OV_Runtime_UG/Samples_Overview.md) the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in your application to deploy the model in the target environment - -## Supported Topologies - -* **Classification models:** - * AlexNet - * VGG-16, VGG-19 - * SqueezeNet v1.0, SqueezeNet v1.1 - * ResNet-50, ResNet-101, Res-Net-152 - * Inception v1, Inception v2, Inception v3, Inception v4 - * CaffeNet - * MobileNet - * Squeeze-and-Excitation Networks: SE-BN-Inception, SE-Resnet-101, SE-ResNet-152, SE-ResNet-50, SE-ResNeXt-101, SE-ResNeXt-50 - * ShuffleNet v2 - -* **Object detection models:** - * SSD300-VGG16, SSD500-VGG16 - * Faster-RCNN - * RefineDet (MYRIAD plugin only) - -* **Face detection models:** - * VGG Face - * SSH: Single Stage Headless Face Detector - -* **Semantic segmentation models:** - * FCN8 - -> **NOTE**: It is necessary to specify mean and scale values for most of the Caffe\* models to convert them with the Model Optimizer. The exact values should be determined separately for each model. For example, for Caffe\* models trained on ImageNet, the mean values usually are `123.68`, `116.779`, `103.939` for blue, green and red channels respectively. The scale value is usually `127.5`. 
Refer to the General Conversion Parameters section in [Converting a Model to Intermediate Representation (IR)](Converting_Model.md) for the information on how to specify mean and scale values. - ## Convert a Caffe* Model - -To convert a Caffe\* model, run Model Optimizer with the path to the input model `.caffemodel` file and the path to an output directory with write permissions: +To convert a Caffe\* model, run Model Optimizer with the path to the input model `.caffemodel` file: ```sh - mo --input_model .caffemodel --output_dir + mo --input_model .caffemodel ``` -Two groups of parameters are available to convert your model: - -* Framework-agnostic parameters are used to convert a model trained with any supported framework. For details, see see the General Conversion Parameters section on the [Converting a Model to Intermediate Representation (IR)](Converting_Model.md) page. -* [Caffe-specific parameters](#caffe_specific_conversion_params) are used to convert only Caffe\* models. - -### Using Caffe\*-Specific Conversion Parameters - The following list provides the Caffe\*-specific parameters. ``` @@ -93,16 +45,16 @@ Caffe*-specific parameters: attributes without flattening nested parameters. ``` -#### Command-Line Interface (CLI) Examples Using Caffe\*-Specific Parameters +### Command-Line Interface (CLI) Examples Using Caffe\*-Specific Parameters -* Launching the Model Optimizer for the [bvlc_alexnet.caffemodel](https://github.com/BVLC/caffe/tree/master/models/bvlc_alexnet) with a specified `prototxt` file. This is needed when the name of the Caffe\* model and the `.prototxt` file are different or are placed in different directories. Otherwise, it is enough to provide only the path to the input `model.caffemodel` file. You must have write permissions for the output directory. 
- ```sh - mo --input_model bvlc_alexnet.caffemodel --input_proto bvlc_alexnet.prototxt --output_dir +* Launching the Model Optimizer for the [bvlc_alexnet.caffemodel](https://github.com/BVLC/caffe/tree/master/models/bvlc_alexnet) with a specified `prototxt` file. This is needed when the name of the Caffe\* model and the `.prototxt` file are different or are placed in different directories. Otherwise, it is enough to provide only the path to the input `model.caffemodel` file. + ```sh + mo --input_model bvlc_alexnet.caffemodel --input_proto bvlc_alexnet.prototxt ``` * Launching the Model Optimizer for the [bvlc_alexnet.caffemodel](https://github.com/BVLC/caffe/tree/master/models/bvlc_alexnet) with a specified `CustomLayersMapping` file. This is the legacy method of quickly enabling model conversion if your model has custom layers. This requires the Caffe\* system on the computer. Optional parameters without default values and not specified by the user in the `.prototxt` file are removed from the Intermediate Representation, and nested parameters are flattened: ```sh - mo --input_model bvlc_alexnet.caffemodel -k CustomLayersMapping.xml --disable_omitting_optional --enable_flattening_nested_params --output_dir + mo --input_model bvlc_alexnet.caffemodel -k CustomLayersMapping.xml --disable_omitting_optional --enable_flattening_nested_params ``` This example shows a multi-input model with input layers: `data`, `rois` ``` @@ -124,9 +76,9 @@ Optional parameters without default values and not specified by the user in the } ``` -* Launching the Model Optimizer for a multi-input model with two inputs and providing a new shape for each input in the order they are passed to the Model Optimizer along with a writable output directory. In particular, for data, set the shape to `1,3,227,227`. 
For rois, set the shape to `1,6,1,1`: +* Launching the Model Optimizer for a multi-input model with two inputs and providing a new shape for each input in the order they are passed to the Model Optimizer. In particular, for data, set the shape to `1,3,227,227`. For rois, set the shape to `1,6,1,1`: ```sh - mo --input_model /path-to/your-model.caffemodel --input data,rois --input_shape (1,3,227,227),[1,6,1,1] --output_dir + mo --input_model /path-to/your-model.caffemodel --input data,rois --input_shape (1,3,227,227),[1,6,1,1] ``` ## Custom Layer Definition @@ -146,3 +98,6 @@ In this document, you learned: * Basic information about how the Model Optimizer works with Caffe\* models * Which Caffe\* models are supported * How to convert a trained Caffe\* model using the Model Optimizer with both framework-agnostic and Caffe-specific command-line options + +## See Also +[Model Conversion Tutorials](Convert_Model_Tutorials.md) diff --git a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Kaldi.md b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Kaldi.md index 511823f047e..02c3c90e67e 100644 --- a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Kaldi.md +++ b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Kaldi.md @@ -1,58 +1,14 @@ # Converting a Kaldi* Model {#openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Kaldi} -@sphinxdirective - -.. _convert model kaldi: - -.. toctree:: - :maxdepth: 1 - :hidden: - - openvino_docs_MO_DG_prepare_model_convert_model_kaldi_specific_Aspire_Tdnn_Model - -@endsphinxdirective - -A summary of the steps for optimizing and deploying a model that was trained with Kaldi\*: - -1. [Configure the Model Optimizer](../../Deep_Learning_Model_Optimizer_DevGuide.md) for Kaldi\*. -2. [Convert a Kaldi\* Model](#Convert_From_Kaldi) to produce an optimized [Intermediate Representation (IR)](../../IR_and_opsets.md) of the model based on the trained network topology, weights, and biases values. -3. 
Test the model in the Intermediate Representation format using the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in the target environment via provided Inference Engine [sample applications](../../../OV_Runtime_UG/Samples_Overview.md). -4. [Integrate](../../../OV_Runtime_UG/Samples_Overview.md) the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in your application to deploy the model in the target environment. - > **NOTE**: The Model Optimizer supports the [nnet1](http://kaldi-asr.org/doc/dnn1.html) and [nnet2](http://kaldi-asr.org/doc/dnn2.html) formats of Kaldi models. Support of the [nnet3](http://kaldi-asr.org/doc/dnn3.html) format is limited. -## Supported Topologies -* Convolutional Neural Networks (CNN): - * Wall Street Journal CNN (wsj_cnn4b) - * Resource Management CNN (rm_cnn4a_smbr) - -* Long Short Term Memory (LSTM) Networks: - * Resource Management LSTM (rm_lstm4f) - * TED-LIUM LSTM (ted_lstm4f) - -* Deep Neural Networks (DNN): - * Wall Street Journal DNN (wsj_dnn5b_smbr); - * TED-LIUM DNN (ted_dnn_smbr) - -* Time delay neural network (TDNN) - * [ASpIRE Chain TDNN](kaldi_specific/Aspire_Tdnn_Model.md); - * [Librispeech nnet3](https://github.com/ryanleary/kaldi-test/releases/download/v0.0/LibriSpeech-trained.tgz). - -* TDNN-LSTM model - - ## Convert a Kaldi* Model +To convert a Kaldi\* model, run Model Optimizer with the path to the input model `.nnet` or `.mdl` file: -To convert a Kaldi\* model, run Model Optimizer with the path to the input model `.nnet` or `.mdl` file and to an output directory where you have write permissions: ```sh -mo --input_model .nnet --output_dir + mo --input_model .nnet ``` -Two groups of parameters are available to convert your model: - -* Framework-agnostic parameters are used to convert a model trained with any supported framework. 
For details, see see the General Conversion Parameters section on the [Converting a Model to Intermediate Representation (IR)](Converting_Model.md) page. -* [Kaldi-specific parameters](#kaldi_specific_conversion_params) are used to convert only Kaldi\* models. - ### Using Kaldi\*-Specific Conversion Parameters The following list provides the Kaldi\*-specific parameters. @@ -67,14 +23,14 @@ Kaldi-specific parameters: ### Examples of CLI Commands -* To launch the Model Optimizer for the wsj_dnn5b_smbr model with the specified `.nnet` file and an output directory where you have write permissions: +* To launch the Model Optimizer for the wsj_dnn5b_smbr model with the specified `.nnet` file: ```sh - mo --input_model wsj_dnn5b_smbr.nnet --output_dir + mo --input_model wsj_dnn5b_smbr.nnet ``` -* To launch the Model Optimizer for the wsj_dnn5b_smbr model with existing file that contains counts for the last layer with biases and a writable output directory: +* To launch the Model Optimizer for the wsj_dnn5b_smbr model with existing file that contains counts for the last layer with biases: ```sh - mo --input_model wsj_dnn5b_smbr.nnet --counts wsj_dnn5b_smbr.counts --output_dir + mo --input_model wsj_dnn5b_smbr.nnet --counts wsj_dnn5b_smbr.counts ``` * The Model Optimizer normalizes сounts in the following way: @@ -88,34 +44,36 @@ Kaldi-specific parameters: \f$|C|\f$ - number of elements in the counts array; * The normalized counts are subtracted from biases of the last or next to last layer (if last layer is SoftMax). - > **NOTE:** Model Optimizer will show warning if model contains counts values inside model and `--counts` option is not used. + > **NOTE**: Model Optimizer will show warning if model contains counts values inside model and `--counts` option is not used. 
* If you want to remove the last SoftMax layer in the topology, launch the Model Optimizer with the `--remove_output_softmax` flag: ```sh - mo --input_model wsj_dnn5b_smbr.nnet --counts wsj_dnn5b_smbr.counts --remove_output_softmax --output_dir _ + mo --input_model wsj_dnn5b_smbr.nnet --counts wsj_dnn5b_smbr.counts --remove_output_softmax ``` The Model Optimizer finds the last layer of the topology and removes this layer only if it is a SoftMax layer. > **NOTE**: Model Optimizer can remove SoftMax layer only if the topology has one output. - - > **NOTE**: For sample inference of Kaldi models, you can use the Inference Engine Speech Recognition sample application. The sample supports models with one output. If your model has several outputs, specify the desired one with the `--output` option. - - If you want to convert a model for inference on Intel® Movidius™ Myriad™, use the `--remove_memory` option. -It removes Memory layers from the IR. Instead of it, additional inputs and outputs appear in the IR. + + > **NOTE**: For sample inference of Kaldi models, you can use the OpenVINO Speech Recognition sample application. The sample supports models with one output. If your model has several outputs, specify the desired one with the `--output` option. + + If you want to convert a model for inference on Intel® Movidius™ Myriad™, use the `--remove_memory` option. +It removes Memory layers from the IR. Instead, additional inputs and outputs appear in the IR. The Model Optimizer outputs the mapping between inputs and outputs.
For example: ```sh -[ WARNING ] Add input/output mapped Parameter_0_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out -> Result_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out -[ WARNING ] Add input/output mapped Parameter_1_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out -> Result_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out -[ WARNING ] Add input/output mapped Parameter_0_for_iteration_Offset_fastlstm3.c_trunc__3390 -> Result_for_iteration_Offset_fastlstm3.c_trunc__3390 +[ WARNING ] Add input/output mapped Parameter_0_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out -> Result_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out +[ WARNING ] Add input/output mapped Parameter_1_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out -> Result_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out +[ WARNING ] Add input/output mapped Parameter_0_for_iteration_Offset_fastlstm3.c_trunc__3390 -> Result_for_iteration_Offset_fastlstm3.c_trunc__3390 ``` Based on this mapping, link inputs and outputs in your application manually as follows: - -1. Initialize inputs from the mapping as zeros in the first frame of an utterance. -2. Copy output blobs from the mapping to the corresponding inputs. For example, data from `Result_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out` -must be copied to `Parameter_0_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out`. +1. Initialize inputs from the mapping as zeros in the first frame of an utterance. +2. Copy output blobs from the mapping to the corresponding inputs. For example, data from `Result_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out` +must be copied to `Parameter_0_for_Offset_fastlstm2.r_trunc__2Offset_fastlstm2.r_trunc__2_out`. ## Supported Kaldi\* Layers Refer to [Supported Framework Layers ](../Supported_Frameworks_Layers.md) for the list of supported standard layers. 
+ +## See Also +[Model Conversion Tutorials](Convert_Model_Tutorials.md) diff --git a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_MxNet.md b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_MxNet.md index 8841fccbba1..bcaf6f3b517 100644 --- a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_MxNet.md +++ b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_MxNet.md @@ -1,73 +1,12 @@ # Converting an MXNet* Model {#openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_MxNet} -@sphinxdirective - -.. _convert model mxnet: - -.. toctree:: - :maxdepth: 1 - :hidden: - - openvino_docs_MO_DG_prepare_model_convert_model_mxnet_specific_Convert_Style_Transfer_From_MXNet - openvino_docs_MO_DG_prepare_model_convert_model_mxnet_specific_Convert_GluonCV_Models - -@endsphinxdirective - -A summary of the steps for optimizing and deploying a model that was trained with the MXNet\* framework: - -1. [Configure the Model Optimizer](../../Deep_Learning_Model_Optimizer_DevGuide.md) for MXNet* (MXNet was used to train your model) -2. [Convert a MXNet model](#ConvertMxNet) to produce an optimized [Intermediate Representation (IR)](../../IR_and_opsets.md) of the model based on the trained network topology, weights, and biases values -3. Test the model in the Intermediate Representation format using the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in the target environment via provided Inference Engine [sample applications](../../../OV_Runtime_UG/Samples_Overview.md) -4. [Integrate](../../../OV_Runtime_UG/Samples_Overview.md) the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in your application to deploy the model in the target environment - -## Supported Topologies - -> **NOTE**: SSD models from the table require converting to the deploy mode. 
For details, see the [Conversion Instructions](https://github.com/zhreshold/mxnet-ssd/#convert-model-to-deploy-mode) in the GitHub MXNet-SSD repository. - -| Model Name| Model File | -| ------------- |:-------------:| -|VGG-16| [Repo](https://github.com/dmlc/mxnet-model-gallery/tree/master), [Symbol](http://data.mxnet.io/models/imagenet/vgg/vgg16-symbol.json), [Params](http://data.mxnet.io/models/imagenet/vgg/vgg16-0000.params)| -|VGG-19| [Repo](https://github.com/dmlc/mxnet-model-gallery/tree/master), [Symbol](http://data.mxnet.io/models/imagenet/vgg/vgg19-symbol.json), [Params](http://data.mxnet.io/models/imagenet/vgg/vgg19-0000.params)| -|ResNet-152 v1| [Repo](https://github.com/dmlc/mxnet-model-gallery/tree/master), [Symbol](http://data.mxnet.io/models/imagenet/resnet/152-layers/resnet-152-symbol.json), [Params](http://data.mxnet.io/models/imagenet/resnet/152-layers/resnet-152-0000.params)| -|SqueezeNet_v1.1| [Repo](https://github.com/dmlc/mxnet-model-gallery/tree/master), [Symbol](http://data.mxnet.io/models/imagenet/squeezenet/squeezenet_v1.1-symbol.json), [Params](http://data.mxnet.io/models/imagenet/squeezenet/squeezenet_v1.1-0000.params)| -|Inception BN| [Repo](https://github.com/dmlc/mxnet-model-gallery/tree/master), [Symbol](http://data.mxnet.io/models/imagenet/inception-bn/Inception-BN-symbol.json), [Params](http://data.mxnet.io/models/imagenet/inception-bn/Inception-BN-0126.params)| -|CaffeNet| [Repo](https://github.com/dmlc/mxnet-model-gallery/tree/master), [Symbol](http://data.mxnet.io/mxnet/models/imagenet/caffenet/caffenet-symbol.json), [Params](http://data.mxnet.io/models/imagenet/caffenet/caffenet-0000.params)| -|DenseNet-121| [Repo](https://github.com/miraclewkf/DenseNet), [Symbol](https://raw.githubusercontent.com/miraclewkf/DenseNet/master/model/densenet-121-symbol.json), [Params](https://drive.google.com/file/d/0ByXcv9gLjrVcb3NGb1JPa3ZFQUk/view?usp=drive_web)| -|DenseNet-161| [Repo](https://github.com/miraclewkf/DenseNet), 
[Symbol](https://raw.githubusercontent.com/miraclewkf/DenseNet/master/model/densenet-161-symbol.json), [Params](https://drive.google.com/file/d/0ByXcv9gLjrVcS0FwZ082SEtiUjQ/view)| -|DenseNet-169| [Repo](https://github.com/miraclewkf/DenseNet), [Symbol](https://raw.githubusercontent.com/miraclewkf/DenseNet/master/model/densenet-169-symbol.json), [Params](https://drive.google.com/file/d/0ByXcv9gLjrVcOWZJejlMOWZvZmc/view)| -|DenseNet-201| [Repo](https://github.com/miraclewkf/DenseNet), [Symbol](https://raw.githubusercontent.com/miraclewkf/DenseNet/master/model/densenet-201-symbol.json), [Params](https://drive.google.com/file/d/0ByXcv9gLjrVcUjF4MDBwZ3FQbkU/view)| -|MobileNet| [Repo](https://github.com/KeyKy/mobilenet-mxnet), [Symbol](https://github.com/KeyKy/mobilenet-mxnet/blob/master/mobilenet.py), [Params](https://github.com/KeyKy/mobilenet-mxnet/blob/master/mobilenet-0000.params)| -|SSD-ResNet-50| [Repo](https://github.com/zhreshold/mxnet-ssd), [Symbol + Params](https://github.com/zhreshold/mxnet-ssd/releases/download/v0.6/resnet50_ssd_512_voc0712_trainval.zip)| -|SSD-VGG-16-300| [Repo](https://github.com/zhreshold/mxnet-ssd), [Symbol + Params](https://github.com/zhreshold/mxnet-ssd/releases/download/v0.5-beta/vgg16_ssd_300_voc0712_trainval.zip)| -|SSD-Inception v3| [Repo](https://github.com/zhreshold/mxnet-ssd), [Symbol + Params](https://github.com/zhreshold/mxnet-ssd/releases/download/v0.7-alpha/ssd_inceptionv3_512_voc0712trainval.zip)| -|FCN8 (Semantic Segmentation)| [Repo](https://github.com/apache/incubator-mxnet/tree/master/example/fcn-xs), [Symbol](https://www.dropbox.com/sh/578n5cxej7ofd6m/AAA9SFCBN8R_uL2CnAd3WQ5ia/FCN8s_VGG16-symbol.json?dl=0), [Params](https://www.dropbox.com/sh/578n5cxej7ofd6m/AABHWZHCtA2P6iR6LUflkxb_a/FCN8s_VGG16-0019-cpu.params?dl=0)| -|MTCNN part 1 (Face Detection)| [Repo](https://github.com/pangyupo/mxnet_mtcnn_face_detection), [Symbol](https://github.com/pangyupo/mxnet_mtcnn_face_detection/blob/master/model/det1-symbol.json), 
[Params](https://github.com/pangyupo/mxnet_mtcnn_face_detection/blob/master/model/det1-0001.params)| -|MTCNN part 2 (Face Detection)| [Repo](https://github.com/pangyupo/mxnet_mtcnn_face_detection), [Symbol](https://github.com/pangyupo/mxnet_mtcnn_face_detection/blob/master/model/det2-symbol.json), [Params](https://github.com/pangyupo/mxnet_mtcnn_face_detection/blob/master/model/det2-0001.params)| -|MTCNN part 3 (Face Detection)| [Repo](https://github.com/pangyupo/mxnet_mtcnn_face_detection), [Symbol](https://github.com/pangyupo/mxnet_mtcnn_face_detection/blob/master/model/det3-symbol.json), [Params](https://github.com/pangyupo/mxnet_mtcnn_face_detection/blob/master/model/det3-0001.params)| -|MTCNN part 4 (Face Detection)| [Repo](https://github.com/pangyupo/mxnet_mtcnn_face_detection), [Symbol](https://github.com/pangyupo/mxnet_mtcnn_face_detection/blob/master/model/det4-symbol.json), [Params](https://github.com/pangyupo/mxnet_mtcnn_face_detection/blob/master/model/det4-0001.params)| -|Lightened_moon| [Repo](https://github.com/tornadomeet/mxnet-face/tree/master/model/lightened_moon), [Symbol](https://github.com/tornadomeet/mxnet-face/blob/master/model/lightened_moon/lightened_moon_fuse-symbol.json), [Params](https://github.com/tornadomeet/mxnet-face/blob/master/model/lightened_moon/lightened_moon_fuse-0082.params)| -|RNN-Transducer| [Repo](https://github.com/HawkAaron/mxnet-transducer) | -|word_lm| [Repo](https://github.com/apache/incubator-mxnet/tree/master/example/rnn/word_lm) | - -**Other supported topologies** - -* [GluonCV SSD and YOLO-v3 models](https://gluon-cv.mxnet.io/model_zoo/detection.html) can be converted using the following [instructions](mxnet_specific/Convert_GluonCV_Models.md). -* [Style transfer model](https://github.com/zhaw/neural_style) can be converted using the following [instructions](mxnet_specific/Convert_Style_Transfer_From_MXNet.md). 
- ## Convert an MXNet* Model - -To convert an MXNet\* model, run Model Optimizer with a path to the input model `.params` file and to an output directory where you have write permissions: +To convert an MXNet\* model, run Model Optimizer with a path to the input model `.params` file: ```sh - mo --input_model model-file-0000.params --output_dir + mo --input_model model-file-0000.params ``` -Two groups of parameters are available to convert your model: - -* Framework-agnostic parameters are used to convert a model trained with any supported framework. For details, see see the General Conversion Parameters section on the [Converting a Model to Intermediate Representation (IR)](Converting_Model.md) page. -* [MXNet-specific parameters](#mxnet_specific_conversion_params) are used to convert only MXNet models. - - ### Using MXNet\*-Specific Conversion Parameters The following list provides the MXNet\*-specific parameters. @@ -101,7 +40,7 @@ MXNet-specific parameters: Internally, when you run the Model Optimizer, it loads the model, goes through the topology, and tries to find each layer type in a list of known layers. Custom layers are layers that are not included in the list of known layers. If your topology contains any layers that are not in this list of known layers, the Model Optimizer classifies them as custom. ## Supported MXNet\* Layers -Refer to [Supported Framework Layers ](../Supported_Frameworks_Layers.md) for the list of supported standard layers. +Refer to [Supported Framework Layers](../Supported_Frameworks_Layers.md) for the list of supported standard layers. 
## Frequently Asked Questions (FAQ) @@ -114,3 +53,6 @@ In this document, you learned: * Basic information about how the Model Optimizer works with MXNet\* models * Which MXNet\* models are supported * How to convert a trained MXNet\* model using the Model Optimizer with both framework-agnostic and MXNet-specific command-line options + +## See Also +[Model Conversion Tutorials](Convert_Model_Tutorials.md) diff --git a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_ONNX.md b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_ONNX.md index 7734c5902d7..08dacd50aa6 100644 --- a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_ONNX.md +++ b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_ONNX.md @@ -1,83 +1,21 @@ # Converting a ONNX* Model {#openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_ONNX} -@sphinxdirective - -.. _convert model onnx: - -.. toctree:: - :maxdepth: 1 - :hidden: - - openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_Faster_RCNN - openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_Mask_RCNN - openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_GPT2 - openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_DLRM - openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_PyTorch - -@endsphinxdirective - ## Introduction to ONNX - [ONNX*](https://github.com/onnx/onnx) is a representation format for deep learning models. ONNX allows AI developers easily transfer models between different frameworks that helps to choose the best combination for them. Today, PyTorch\*, Caffe2\*, Apache MXNet\*, Microsoft Cognitive Toolkit\* and other tools are developing ONNX support. 
-## Supported Public ONNX Topologies -| Model Name | Path to Public Models master branch| -|:----|:----| -| bert_large | [model archive](https://github.com/mlperf/inference/tree/master/v0.7/language/bert) | -| bvlc_alexnet | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/bvlc_alexnet.tar.gz) | -| bvlc_googlenet | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/bvlc_googlenet.tar.gz) | -| bvlc_reference_caffenet | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/bvlc_reference_caffenet.tar.gz) | -| bvlc_reference_rcnn_ilsvrc13 | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/bvlc_reference_rcnn_ilsvrc13.tar.gz) | -| inception_v1 | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/inception_v1.tar.gz) | -| inception_v2 | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/inception_v2.tar.gz) | -| resnet50 | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/resnet50.tar.gz) | -| squeezenet | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/squeezenet.tar.gz) | -| densenet121 | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/densenet121.tar.gz) | -| emotion_ferplus | [model archive](https://www.cntk.ai/OnnxModels/emotion_ferplus/opset_2/emotion_ferplus.tar.gz) | -| mnist | [model archive](https://www.cntk.ai/OnnxModels/mnist/opset_1/mnist.tar.gz) | -| shufflenet | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/shufflenet.tar.gz) | -| VGG19 | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/vgg19.tar.gz) | -| zfnet512 | [model archive](https://s3.amazonaws.com/download.onnx/models/opset_8/zfnet512.tar.gz) | -| GPT-2 | [model archive](https://github.com/onnx/models/blob/master/text/machine_comprehension/gpt-2/model/gpt2-10.tar.gz) | -| YOLOv3 | [model 
archive](https://github.com/onnx/models/blob/master/vision/object_detection_segmentation/yolov3/model/yolov3-10.tar.gz) | - -Listed models are built with the operation set version 8 except the GPT-2 model (which uses version 10). Models that are upgraded to higher operation set versions may not be supported. - -## Supported PaddlePaddle* Models via ONNX Conversion -Starting from the R5 release, the OpenVINO™ toolkit officially supports public PaddlePaddle* models via ONNX conversion. -The list of supported topologies downloadable from PaddleHub is presented below: - -| Model Name | Command to download the model from PaddleHub | -|:----|:----| -| [MobileNetV2](https://www.paddlepaddle.org.cn/hubdetail?name=mobilenet_v2_imagenet) | `hub install mobilenet_v2_imagenet==1.0.1` | -| [ResNet18](https://www.paddlepaddle.org.cn/hubdetail?name=resnet_v2_18_imagenet) | `hub install resnet_v2_18_imagenet==1.0.0` | -| [ResNet34](https://www.paddlepaddle.org.cn/hubdetail?name=resnet_v2_34_imagenet) | `hub install resnet_v2_34_imagenet==1.0.0` | -| [ResNet50](https://www.paddlepaddle.org.cn/hubdetail?name=resnet_v2_50_imagenet) | `hub install resnet_v2_50_imagenet==1.0.1` | -| [ResNet101](https://www.paddlepaddle.org.cn/hubdetail?name=resnet_v2_101_imagenet) | `hub install resnet_v2_101_imagenet==1.0.1` | -| [ResNet152](https://www.paddlepaddle.org.cn/hubdetail?name=resnet_v2_152_imagenet) | `hub install resnet_v2_152_imagenet==1.0.1` | -> **NOTE**: To convert a model downloaded from PaddleHub use [paddle2onnx](https://github.com/PaddlePaddle/paddle2onnx) converter. 
- -The list of supported topologies from the [models v1.5](https://github.com/PaddlePaddle/models/tree/release/1.5) package: -* [MobileNetV1](https://github.com/PaddlePaddle/models/blob/release/1.5/PaddleCV/image_classification/models/mobilenet.py) -* [MobileNetV2](https://github.com/PaddlePaddle/models/blob/release/1.5/PaddleCV/image_classification/models/mobilenet_v2.py) -* [ResNet](https://github.com/PaddlePaddle/models/blob/release/1.5/PaddleCV/image_classification/models/resnet.py) -* [ResNet_vc](https://github.com/PaddlePaddle/models/blob/release/1.5/PaddleCV/image_classification/models/resnet_vc.py) -* [ResNet_vd](https://github.com/PaddlePaddle/models/blob/release/1.5/PaddleCV/image_classification/models/resnet_vd.py) -* [ResNeXt](https://github.com/PaddlePaddle/models/blob/release/1.5/PaddleCV/image_classification/models/resnext.py) -* [ResNeXt_vd](https://github.com/PaddlePaddle/models/blob/release/1.5/PaddleCV/image_classification/models/resnext_vd.py) - -> **NOTE**: To convert these topologies one should first serialize the model by calling `paddle.fluid.io.save_inference_model` - ([description](https://www.paddlepaddle.org.cn/documentation/docs/en/1.3/api/io.html#save-inference-model)) command and - after that use [paddle2onnx](https://github.com/PaddlePaddle/paddle2onnx) converter. - ## Convert an ONNX* Model The Model Optimizer process assumes you have an ONNX model that was directly downloaded from a public repository or converted from any framework that supports exporting to the ONNX format. -To convert an ONNX\* model, run Model Optimizer with the path to the input model `.nnet` file and an output directory where you have write permissions: +To convert an ONNX\* model, run Model Optimizer with the path to the input model `.onnx` file: + ```sh - mo --input_model .onnx --output_dir + mo --input_model .onnx ``` -There are no ONNX\* specific parameters, so only framework-agnostic parameters are available to convert your model. 
For details, see see the General Conversion Parameters section on the [Converting a Model to Intermediate Representation (IR)](Converting_Model.md) page. + +There are no ONNX\* specific parameters, so only framework-agnostic parameters are available to convert your model. For details, see the General Conversion Parameters section on the [Converting a Model to Intermediate Representation (IR)](Converting_Model.md) page. ## Supported ONNX\* Layers Refer to [Supported Framework Layers](../Supported_Frameworks_Layers.md) for the list of supported standard layers. + +## See Also +[Model Conversion Tutorials](Convert_Model_Tutorials.md) diff --git a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Paddle.md b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Paddle.md index 19c487b21ec..bb9567f3f9b 100644 --- a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Paddle.md +++ b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_Paddle.md @@ -1,58 +1,25 @@ -# Converting a Paddle* Model {#openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Paddle} +# Converting a PaddlePaddle* Model {#openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Paddle} -A summary of the steps for optimizing and deploying a model trained with Paddle\*: +## Convert a PaddlePaddle Model +To convert a PaddlePaddle model, use the `mo` script and specify the path to the input model `.pdmodel` file: -1. [Configure the Model Optimizer](../../Deep_Learning_Model_Optimizer_DevGuide.md) for Paddle\*. -2. [Convert a Paddle\* Model](#Convert_From_Paddle) to produce an optimized [Intermediate Representation (IR)](../../IR_and_opsets.md) of the model based on the trained network topology, weights, and biases. -3. 
Test the model in the Intermediate Representation format using the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in the target environment via provided Inference Engine [sample applications](../../../OV_Runtime_UG/Samples_Overview.md). -4. [Integrate](../../../OV_Runtime_UG/Samples_Overview.md) the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in your application to deploy the model in the target environment. - -## Supported Topologies - -| Model Name| Model Type| Description| -| ------------- | ------------ | ------------- | -|ppocr-det| optical character recognition| Models are exported from [PaddleOCR](https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.1/). Refer to [READ.md](https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.1/#pp-ocr-20-series-model-listupdate-on-dec-15).| -|ppocr-rec| optical character recognition| Models are exported from [PaddleOCR](https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.1/). Refer to [READ.md](https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.1/#pp-ocr-20-series-model-listupdate-on-dec-15).| -|ResNet-50| classification| Models are exported from [PaddleClas](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.1/). Refer to [getting_started_en.md](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.1/docs/en/tutorials/getting_started_en.md#4-use-the-inference-model-to-predict)| -|MobileNet v2| classification| Models are exported from [PaddleClas](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.1/). Refer to [getting_started_en.md](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.1/docs/en/tutorials/getting_started_en.md#4-use-the-inference-model-to-predict)| -|MobileNet v3| classification| Models are exported from [PaddleClas](https://github.com/PaddlePaddle/PaddleClas/tree/release/2.1/). 
Refer to [getting_started_en.md](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.1/docs/en/tutorials/getting_started_en.md#4-use-the-inference-model-to-predict)| -|BiSeNet v2| semantic segmentation| Models are exported from [PaddleSeg](https://github.com/PaddlePaddle/PaddleSeg/tree/release/2.1). Refer to [model_export.md](https://github.com/PaddlePaddle/PaddleSeg/blob/release/2.1/docs/model_export.md#)| -|DeepLab v3 plus| semantic segmentation| Models are exported from [PaddleSeg](https://github.com/PaddlePaddle/PaddleSeg/tree/release/2.1). Refer to [model_export.md](https://github.com/PaddlePaddle/PaddleSeg/blob/release/2.1/docs/model_export.md#)| -|Fast-SCNN| semantic segmentation| Models are exported from [PaddleSeg](https://github.com/PaddlePaddle/PaddleSeg/tree/release/2.1). Refer to [model_export.md](https://github.com/PaddlePaddle/PaddleSeg/blob/release/2.1/docs/model_export.md#)| -|OCRNET| semantic segmentation| Models are exported from [PaddleSeg](https://github.com/PaddlePaddle/PaddleSeg/tree/release/2.1). Refer to [model_export.md](https://github.com/PaddlePaddle/PaddleSeg/blob/release/2.1/docs/model_export.md#)| -|Yolo v3| detection| Models are exported from [PaddleDetection](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.1). Refer to [EXPORT_MODEL.md](https://github.com/PaddlePaddle/PaddleDetection/blob/release/2.1/deploy/EXPORT_MODEL.md#).| -|ppyolo| detection| Models are exported from [PaddleDetection](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.1). Refer to [EXPORT_MODEL.md](https://github.com/PaddlePaddle/PaddleDetection/blob/release/2.1/deploy/EXPORT_MODEL.md#).| -|MobileNetv3-SSD| detection| Models are exported from [PaddleDetection](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.2). 
Refer to [EXPORT_MODEL.md](https://github.com/PaddlePaddle/PaddleDetection/blob/release/2.2/deploy/EXPORT_MODEL.md#).| -|U-Net| semantic segmentation| Models are exported from [PaddleSeg](https://github.com/PaddlePaddle/PaddleSeg/tree/release/2.3). Refer to [model_export.md](https://github.com/PaddlePaddle/PaddleSeg/blob/release/2.3/docs/model_export.md#)| -|BERT| language representation| Models are exported from [PaddleNLP](https://github.com/PaddlePaddle/PaddleNLP/tree/v2.1.1). Refer to [README.md](https://github.com/PaddlePaddle/PaddleNLP/tree/develop/examples/language_model/bert#readme)| -|ernie| language representation| Models are exported from [PaddleNLP](https://github.com/PaddlePaddle/PaddleNLP/tree/v2.1.1). Refer to [README.md](https://github.com/PaddlePaddle/PaddleNLP/tree/develop/examples/language_model/bert#readme)| - -> **NOTE:** The verified models are exported from the repository of branch release/2.1. - -## Convert a Paddle* Model - -To convert a Paddle\* model: - -1. Activate environment with installed OpenVINO if needed -2. Use the `mo` script to simply convert a model, specifying the framework, the path to the input model `.pdmodel` file and the path to an output directory with write permissions: ```sh - mo --input_model .pdmodel --output_dir --framework=paddle + mo --input_model .pdmodel ``` -Parameters to convert your model: +### Example of Converting a PaddlePaddle Model +Below is the example command to convert yolo v3 PaddlePaddle network to OpenVINO IR network with Model Optimizer. -* [Framework-agnostic parameters](Converting_Model.md): These parameters are used to convert a model trained with any supported framework. -> **NOTE:** `--scale`, `--scale_values`, `--mean_values` are not supported in the current version of mo_paddle. - -### Example of Converting a Paddle* Model -Below is the example command to convert yolo v3 Paddle\* network to OpenVINO IR network with Model Optimizer. 
```sh - mo --model_name yolov3_darknet53_270e_coco --output_dir --framework=paddle --data_type=FP32 --reverse_input_channels --input_shape=[1,3,608,608],[1,2],[1,2] --input=image,im_shape,scale_factor --output=save_infer_model/scale_0.tmp_1,save_infer_model/scale_1.tmp_1 --input_model=yolov3.pdmodel + mo --input_model=yolov3.pdmodel --input=image,im_shape,scale_factor --input_shape=[1,3,608,608],[1,2],[1,2] --reverse_input_channels --output=save_infer_model/scale_0.tmp_1,save_infer_model/scale_1.tmp_1 ``` -## Supported Paddle\* Layers +## Supported PaddlePaddle Layers Refer to [Supported Framework Layers](../Supported_Frameworks_Layers.md) for the list of supported standard layers. ## Frequently Asked Questions (FAQ) When Model Optimizer is unable to run to completion due to issues like typographical errors, incorrectly used options, etc., it provides explanatory messages. They describe the potential cause of the problem and give a link to the [Model Optimizer FAQ](../Model_Optimizer_FAQ.md), which provides instructions on how to resolve most issues. The FAQ also includes links to relevant sections in the Model Optimizer Developer Guide to help you understand what went wrong. + +## See Also +[Model Conversion Tutorials](Convert_Model_Tutorials.md) diff --git a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_PyTorch.md b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_PyTorch.md index 7ffe0fa11ad..ea07288f007 100644 --- a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_PyTorch.md +++ b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_PyTorch.md @@ -1,64 +1,19 @@ # Converting a PyTorch* Model {#openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_PyTorch} -@sphinxdirective - -.. 
toctree:: - :maxdepth: 1 - :hidden: - - openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_F3Net - openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_QuartzNet - openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_RNNT - openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_YOLACT - openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_Bert_ner - openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_RCAN - -@endsphinxdirective - -## Supported Topologies - -Here is the list of models that are tested and guaranteed to be supported. However, you can also use these instructions to convert PyTorch\* models that are not presented in the list. - -* [Torchvision Models](https://pytorch.org/docs/stable/torchvision/index.html): alexnet, densenet121, densenet161, - densenet169, densenet201, resnet101, resnet152, resnet18, resnet34, resnet50, vgg11, vgg13, vgg16, vgg19. - The models can be converted using [regular instructions](#typical-pytorch). -* [Cadene Pretrained Models](https://github.com/Cadene/pretrained-models.pytorch): alexnet, fbresnet152, resnet101, - resnet152, resnet18, resnet34, resnet152, resnet18, resnet34, resnet50, resnext101_32x4d, resnext101_64x4d, vgg11. - The models can be converted using [regular instructions](#typical-pytorch). -* [ESPNet Models](https://github.com/sacmehta/ESPNet/tree/master/pretrained) can be converted using [regular instructions](#typical-pytorch). -* [MobileNetV3](https://github.com/d-li14/mobilenetv3.pytorch) can be converted using [regular instructions](#typical-pytorch). -* [iSeeBetter](https://github.com/amanchadha/iSeeBetter) can be converted using [regular instructions](#typical-pytorch). - Please refer to [`iSeeBetterTest.py`](https://github.com/amanchadha/iSeeBetter/blob/master/iSeeBetterTest.py) script for code to initialize the model. 
-* F3Net topology can be converted using steps described in [Convert PyTorch\* F3Net to the IR](pytorch_specific/Convert_F3Net.md) - instruction which is used instead of steps 2 and 3 of [regular instructions](#typical-pytorch). -* QuartzNet topologies from [NeMo project](https://github.com/NVIDIA/NeMo) can be converted using steps described in - [Convert PyTorch\* QuartzNet to the IR](pytorch_specific/Convert_QuartzNet.md) instruction which is used instead of - steps 2 and 3 of [regular instructions](#typical-pytorch). -* YOLACT topology can be converted using steps described in [Convert PyTorch\* YOLACT to the IR](pytorch_specific/Convert_YOLACT.md) - instruction which is used instead of steps 2 and 3 of [regular instructions](#typical-pytorch). -* [RCAN](https://github.com/yulunzhang/RCAN) topology can be converted using steps described in [Convert PyTorch\* RCAN to the IR](pytorch_specific/Convert_RCAN.md) - instruction which is used instead of steps 2 and 3 of [regular instructions](#typical-pytorch). -* [BERT_NER](https://github.com/kamalkraj/BERT-NER) topology can be converted using steps described in [Convert PyTorch* BERT-NER to the IR](pytorch_specific/Convert_Bert_ner.md) - instruction which is used instead of steps 2 and 3 of [regular instructions](#typical-pytorch). - -## Typical steps to convert PyTorch\* model - +## Typical Steps to Convert PyTorch Model PyTorch* framework is supported through export to ONNX\* format. A summary of the steps for optimizing and deploying a model that was trained with the PyTorch\* framework: -1. [Configure the Model Optimizer](../../Deep_Learning_Model_Optimizer_DevGuide.md) for ONNX\*. -2. [Export PyTorch model to ONNX\*](#export-to-onnx). -3. [Convert an ONNX\* model](Convert_Model_From_ONNX.md) to produce an optimized [Intermediate Representation (IR)](../../IR_and_opsets.md) of the model based on the trained network topology, weights, and biases values. -4. 
Test the model in the Intermediate Representation format using the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in the target environment via provided [sample applications](../../../OV_Runtime_UG/Samples_Overview.md). -5. [Integrate](../../../OV_Runtime_UG/Samples_Overview.md) the Inference Engine in your application to deploy the model in the target environment. +1. [Export PyTorch model to ONNX\*](#export-to-onnx). +2. [Convert an ONNX\* model](Convert_Model_From_ONNX.md) to produce an optimized [Intermediate Representation (IR)](../../IR_and_opsets.md) of the model based on the trained network topology, weights, and biases values. ## Export PyTorch\* Model to ONNX\* Format - PyTorch models are defined in a Python\* code, to export such models use `torch.onnx.export()` method. Usually code to evaluate or test the model is provided with the model code and can be used to initialize and export model. Only the basics will be covered here, the step to export to ONNX\* is crucial but it is covered by PyTorch\* framework. -For more information, please refer to [PyTorch\* documentation](https://pytorch.org/docs/stable/onnx.html). +For more information, please refer to [Exporting PyTorch models to ONNX format](https://pytorch.org/docs/stable/onnx.html). To export a PyTorch\* model you need to obtain the model as an instance of `torch.nn.Module` class and call the `export` function. + ```python import torch @@ -66,7 +21,7 @@ import torch model = SomeModel() # Evaluate the model to switch some operations from training mode to inference. model.eval() -# Create dummy input for the model. It will be used to run the model inside export function. +# Create dummy input for the model. It will be used to run the model inside export function. 
dummy_input = torch.randn(1, 3, 224, 224) # Call the export function torch.onnx.export(model, (dummy_input, ), 'model.onnx') @@ -77,3 +32,6 @@ torch.onnx.export(model, (dummy_input, ), 'model.onnx') * Not all PyTorch\* operations can be exported to ONNX\* opset 9 which is used by default, as of version 1.8.1. It is recommended to export models to opset 11 or higher when export to default opset 9 is not working. In that case, use `opset_version` option of the `torch.onnx.export`. For more information about ONNX* opset, refer to the [Operator Schemas](https://github.com/onnx/onnx/blob/master/docs/Operators.md). + +## See Also +[Model Conversion Tutorials](Convert_Model_Tutorials.md) diff --git a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_TensorFlow.md b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_TensorFlow.md index 1ba6aa2d4d4..1f19b7a1a2a 100644 --- a/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_TensorFlow.md +++ b/docs/MO_DG/prepare_model/convert_model/Convert_Model_From_TensorFlow.md @@ -1,247 +1,40 @@ # Converting a TensorFlow* Model {#openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_TensorFlow} -@sphinxdirective +## Convert TensorFlow 1 Models -.. _convert model tf: +### Convert Frozen Model Format +To convert a TensorFlow model, use the `mo` script to simply convert a model with the path to the input model `.pb` file: -.. 
toctree:: - :maxdepth: 1 - :hidden: - - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_RetinaNet_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_AttentionOCR_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_YOLO_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_FaceNet_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_NCF_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_DeepSpeech_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_lm_1b_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Object_Detection_API_Models - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Slim_Library_Models - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_CRNN_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_GNMT_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_BERT_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_XLNet_From_Tensorflow - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_WideAndDeep_Family_Models - openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_EfficientDet_Models - -@endsphinxdirective - -A summary of the steps for optimizing and deploying a model that was trained with the TensorFlow\* framework: - -1. [Configure the Model Optimizer](../../Deep_Learning_Model_Optimizer_DevGuide.md) for TensorFlow\* (TensorFlow was used to train your model). -2. [Freeze the TensorFlow model](#freeze-the-tensorflow-model) if your model is not already frozen or skip this step and use the [instruction](#loading-nonfrozen-models) to a convert a non-frozen model. -3. 
[Convert a TensorFlow\* model](#Convert_From_TF) to produce an optimized [Intermediate Representation (IR)](../../IR_and_opsets.md) of the model based on the trained network topology, weights, and biases values. -4. Test the model in the Intermediate Representation format using the [OpenVINO™ Runtime](../../../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) in the target environment via provided [sample applications](../../../OV_Runtime_UG/Samples_Overview.md). -5. [Integrate](../../../OV_Runtime_UG/Samples_Overview.md) the Inference Engine in your application to deploy the model in the target environment. - -## Supported Topologies - -**Supported Non-Frozen Topologies with Links to the Associated Slim Model Classification Download Files** - -Detailed information on how to convert models from the TensorFlow\*-Slim Image Classification Model Library is available in the [Converting TensorFlow*-Slim Image Classification Model Library Models](tf_specific/Convert_Slim_Library_Models.md) chapter. The table below contains list of supported TensorFlow\*-Slim Image Classification Model Library models and required mean/scale values. The mean values are specified as if the input image is read in BGR channels order layout like Inference Engine classification sample does. 
- -| Model Name| Slim Model Checkpoint File| \-\-mean_values | \-\-scale| -| ------------- | ------------ | ------------- | -----:| -|Inception v1| [inception_v1_2016_08_28.tar.gz](http://download.tensorflow.org/models/inception_v1_2016_08_28.tar.gz)| [127.5,127.5,127.5]| 127.5| -|Inception v2| [inception_v1_2016_08_28.tar.gz](http://download.tensorflow.org/models/inception_v1_2016_08_28.tar.gz)| [127.5,127.5,127.5]| 127.5| -|Inception v3| [inception_v3_2016_08_28.tar.gz](http://download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz)| [127.5,127.5,127.5]| 127.5| -|Inception V4| [inception_v4_2016_09_09.tar.gz](http://download.tensorflow.org/models/inception_v4_2016_09_09.tar.gz)| [127.5,127.5,127.5]| 127.5| -|Inception ResNet v2| [inception_resnet_v2_2016_08_30.tar.gz](http://download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz)| [127.5,127.5,127.5]| 127.5| -|MobileNet v1 128| [mobilenet_v1_0.25_128.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_0.25_128.tgz)| [127.5,127.5,127.5]| 127.5| -|MobileNet v1 160| [mobilenet_v1_0.5_160.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_0.5_160.tgz)| [127.5,127.5,127.5]| 127.5| -|MobileNet v1 224| [mobilenet_v1_1.0_224.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz)| [127.5,127.5,127.5]| 127.5| -|NasNet Large| [nasnet-a_large_04_10_2017.tar.gz](https://storage.googleapis.com/download.tensorflow.org/models/nasnet-a_large_04_10_2017.tar.gz)| [127.5,127.5,127.5]| 127.5| -|NasNet Mobile| [nasnet-a_mobile_04_10_2017.tar.gz](https://storage.googleapis.com/download.tensorflow.org/models/nasnet-a_mobile_04_10_2017.tar.gz)| [127.5,127.5,127.5]| 127.5| -|ResidualNet-50 v1| [resnet_v1_50_2016_08_28.tar.gz](http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz)| [103.94,116.78,123.68] | 1 | -|ResidualNet-50 v2| 
[resnet_v2_50_2017_04_14.tar.gz](http://download.tensorflow.org/models/resnet_v2_50_2017_04_14.tar.gz)| [103.94,116.78,123.68] | 1 | -|ResidualNet-101 v1| [resnet_v1_101_2016_08_28.tar.gz](http://download.tensorflow.org/models/resnet_v1_101_2016_08_28.tar.gz)| [103.94,116.78,123.68] | 1 | -|ResidualNet-101 v2| [resnet_v2_101_2017_04_14.tar.gz](http://download.tensorflow.org/models/resnet_v2_101_2017_04_14.tar.gz)| [103.94,116.78,123.68] | 1 | -|ResidualNet-152 v1| [resnet_v1_152_2016_08_28.tar.gz](http://download.tensorflow.org/models/resnet_v1_152_2016_08_28.tar.gz)| [103.94,116.78,123.68] | 1 | -|ResidualNet-152 v2| [resnet_v2_152_2017_04_14.tar.gz](http://download.tensorflow.org/models/resnet_v2_152_2017_04_14.tar.gz)| [103.94,116.78,123.68] | 1 | -|VGG-16| [vgg_16_2016_08_28.tar.gz](http://download.tensorflow.org/models/vgg_16_2016_08_28.tar.gz)| [103.94,116.78,123.68] | 1 | -|VGG-19| [vgg_19_2016_08_28.tar.gz](http://download.tensorflow.org/models/vgg_19_2016_08_28.tar.gz)| [103.94,116.78,123.68] | 1 | - -**Supported Pre-Trained Topologies from TensorFlow 1 Detection Model Zoo** - -Detailed information on how to convert models from the TensorFlow 1 Detection Model Zoo is available in the [Converting TensorFlow Object Detection API Models](tf_specific/Convert_Object_Detection_API_Models.md) chapter. The table below contains models from the Object Detection Models zoo that are supported. 
- -| Model Name| TensorFlow 1 Object Detection API Models| -| :------------- | -----:| -|SSD MobileNet V1 COCO\*| [ssd_mobilenet_v1_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_coco_2018_01_28.tar.gz)| -|SSD MobileNet V1 0.75 Depth COCO| [ssd_mobilenet_v1_0.75_depth_300x300_coco14_sync_2018_07_03.tar.gz](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_300x300_coco14_sync_2018_07_03.tar.gz)| -|SSD MobileNet V1 PPN COCO| [ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03.tar.gz](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03.tar.gz)| -|SSD MobileNet V1 FPN COCO| [ssd_mobilenet_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz)| -|SSD ResNet50 FPN COCO| [ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz](http://download.tensorflow.org/models/object_detection/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz)| -|SSD MobileNet V2 COCO| [ssd_mobilenet_v2_coco_2018_03_29.tar.gz](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_coco_2018_03_29.tar.gz)| -|SSD Lite MobileNet V2 COCO| [ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz](http://download.tensorflow.org/models/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz)| -|SSD Inception V2 COCO| [ssd_inception_v2_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/ssd_inception_v2_coco_2018_01_28.tar.gz)| -|RFCN ResNet 101 COCO| [rfcn_resnet101_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/rfcn_resnet101_coco_2018_01_28.tar.gz)| -|Faster R-CNN Inception V2 COCO| 
[faster_rcnn_inception_v2_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz)| -|Faster R-CNN ResNet 50 COCO| [faster_rcnn_resnet50_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_coco_2018_01_28.tar.gz)| -|Faster R-CNN ResNet 50 Low Proposals COCO| [faster_rcnn_resnet50_lowproposals_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_lowproposals_coco_2018_01_28.tar.gz)| -|Faster R-CNN ResNet 101 COCO| [faster_rcnn_resnet101_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_coco_2018_01_28.tar.gz)| -|Faster R-CNN ResNet 101 Low Proposals COCO| [faster_rcnn_resnet101_lowproposals_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_lowproposals_coco_2018_01_28.tar.gz)| -|Faster R-CNN Inception ResNet V2 COCO| [faster_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz)| -|Faster R-CNN Inception ResNet V2 Low Proposals COCO| [faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco_2018_01_28.tar.gz)| -|Faster R-CNN NasNet COCO| [faster_rcnn_nas_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_coco_2018_01_28.tar.gz)| -|Faster R-CNN NasNet Low Proposals COCO| [faster_rcnn_nas_lowproposals_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_lowproposals_coco_2018_01_28.tar.gz)| -|Mask R-CNN Inception ResNet V2 COCO| 
[mask_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz)| -|Mask R-CNN Inception V2 COCO| [mask_rcnn_inception_v2_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_v2_coco_2018_01_28.tar.gz)| -|Mask R-CNN ResNet 101 COCO| [mask_rcnn_resnet101_atrous_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet101_atrous_coco_2018_01_28.tar.gz)| -|Mask R-CNN ResNet 50 COCO| [mask_rcnn_resnet50_atrous_coco_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet50_atrous_coco_2018_01_28.tar.gz)| -|Faster R-CNN ResNet 101 Kitti\*| [faster_rcnn_resnet101_kitti_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_kitti_2018_01_28.tar.gz)| -|Faster R-CNN Inception ResNet V2 Open Images\*| [faster_rcnn_inception_resnet_v2_atrous_oid_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_2018_01_28.tar.gz)| -|Faster R-CNN Inception ResNet V2 Low Proposals Open Images\*| [faster_rcnn_inception_resnet_v2_atrous_lowproposals_oid_2018_01_28.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_oid_2018_01_28.tar.gz)| -|Faster R-CNN ResNet 101 AVA v2.1\*| [faster_rcnn_resnet101_ava_v2.1_2018_04_30.tar.gz](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_ava_v2.1_2018_04_30.tar.gz)| - -**Supported Pre-Trained Topologies from TensorFlow 2 Detection Model Zoo** - -Detailed information on how to convert models from the TensorFlow 2 Detection Model Zoo is available in the [Converting TensorFlow Object Detection API Models](tf_specific/Convert_Object_Detection_API_Models.md) chapter. 
The table below contains models from the Object Detection Models zoo that are supported. - -| Model Name| TensorFlow 2 Object Detection API Models| -| :------------- | -----:| -| EfficientDet D0 512x512 | [efficientdet_d0_coco17_tpu-32.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/efficientdet_d0_coco17_tpu-32.tar.gz)| -| EfficientDet D1 640x640 | [efficientdet_d1_coco17_tpu-32.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/efficientdet_d1_coco17_tpu-32.tar.gz)| -| EfficientDet D2 768x768 | [efficientdet_d2_coco17_tpu-32.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/efficientdet_d2_coco17_tpu-32.tar.gz)| -| EfficientDet D3 896x896 | [efficientdet_d3_coco17_tpu-32.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/efficientdet_d3_coco17_tpu-32.tar.gz)| -| EfficientDet D4 1024x1024 | [efficientdet_d4_coco17_tpu-32.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/efficientdet_d4_coco17_tpu-32.tar.gz)| -| EfficientDet D5 1280x1280 | [efficientdet_d5_coco17_tpu-32.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/efficientdet_d5_coco17_tpu-32.tar.gz)| -| EfficientDet D6 1280x1280 | [efficientdet_d6_coco17_tpu-32.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/efficientdet_d6_coco17_tpu-32.tar.gz)| -| EfficientDet D7 1536x1536 | [efficientdet_d7_coco17_tpu-32.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/efficientdet_d7_coco17_tpu-32.tar.gz)| -| SSD MobileNet v2 320x320 | [ssd_mobilenet_v2_320x320_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_mobilenet_v2_320x320_coco17_tpu-8.tar.gz)| -| SSD MobileNet V1 FPN 640x640 | [ssd_mobilenet_v1_fpn_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_mobilenet_v1_fpn_640x640_coco17_tpu-8.tar.gz)| -| SSD MobileNet V2 
FPNLite 320x320 | [ssd_mobilenet_v2_fpnlite_320x320_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_mobilenet_v2_fpnlite_320x320_coco17_tpu-8.tar.gz)| -| SSD MobileNet V2 FPNLite 640x640 | [ssd_mobilenet_v2_fpnlite_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_mobilenet_v2_fpnlite_640x640_coco17_tpu-8.tar.gz)| -| SSD ResNet50 V1 FPN 640x640 (RetinaNet50) | [ssd_resnet50_v1_fpn_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_resnet50_v1_fpn_640x640_coco17_tpu-8.tar.gz)| -| SSD ResNet50 V1 FPN 1024x1024 (RetinaNet50) | [ssd_resnet50_v1_fpn_1024x1024_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_resnet50_v1_fpn_1024x1024_coco17_tpu-8.tar.gz)| -| SSD ResNet101 V1 FPN 640x640 (RetinaNet101) | [ssd_resnet101_v1_fpn_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_resnet101_v1_fpn_640x640_coco17_tpu-8.tar.gz)| -| SSD ResNet101 V1 FPN 1024x1024 (RetinaNet101) | [ssd_resnet101_v1_fpn_1024x1024_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_resnet101_v1_fpn_1024x1024_coco17_tpu-8.tar.gz)| -| SSD ResNet152 V1 FPN 640x640 (RetinaNet152) | [ssd_resnet152_v1_fpn_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_resnet152_v1_fpn_640x640_coco17_tpu-8.tar.gz)| -| SSD ResNet152 V1 FPN 1024x1024 (RetinaNet152) | [ssd_resnet152_v1_fpn_1024x1024_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_resnet152_v1_fpn_1024x1024_coco17_tpu-8.tar.gz)| -| Faster R-CNN ResNet50 V1 640x640 | [faster_rcnn_resnet50_v1_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet50_v1_640x640_coco17_tpu-8.tar.gz)| -| Faster R-CNN ResNet50 V1 1024x1024 | 
[faster_rcnn_resnet50_v1_1024x1024_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet50_v1_1024x1024_coco17_tpu-8.tar.gz)| -| Faster R-CNN ResNet50 V1 800x1333 | [faster_rcnn_resnet50_v1_800x1333_coco17_gpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet50_v1_800x1333_coco17_gpu-8.tar.gz)| -| Faster R-CNN ResNet101 V1 640x640 | [faster_rcnn_resnet101_v1_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet101_v1_640x640_coco17_tpu-8.tar.gz)| -| Faster R-CNN ResNet101 V1 1024x1024 | [faster_rcnn_resnet101_v1_1024x1024_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet101_v1_1024x1024_coco17_tpu-8.tar.gz)| -| Faster R-CNN ResNet101 V1 800x1333 | [faster_rcnn_resnet101_v1_800x1333_coco17_gpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet101_v1_800x1333_coco17_gpu-8.tar.gz)| -| Faster R-CNN ResNet152 V1 640x640 | [faster_rcnn_resnet152_v1_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet152_v1_640x640_coco17_tpu-8.tar.gz)| -| Faster R-CNN ResNet152 V1 1024x1024 | [faster_rcnn_resnet152_v1_1024x1024_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet152_v1_1024x1024_coco17_tpu-8.tar.gz)| -| Faster R-CNN ResNet152 V1 800x1333 | [faster_rcnn_resnet152_v1_800x1333_coco17_gpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_resnet152_v1_800x1333_coco17_gpu-8.tar.gz)| -| Faster R-CNN Inception ResNet V2 640x640 | [faster_rcnn_inception_resnet_v2_640x640_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_inception_resnet_v2_640x640_coco17_tpu-8.tar.gz)| -| Faster R-CNN Inception ResNet V2 
1024x1024 | [faster_rcnn_inception_resnet_v2_1024x1024_coco17_tpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/faster_rcnn_inception_resnet_v2_1024x1024_coco17_tpu-8.tar.gz)| -| Mask R-CNN Inception ResNet V2 1024x1024 | [mask_rcnn_inception_resnet_v2_1024x1024_coco17_gpu-8.tar.gz](http://download.tensorflow.org/models/object_detection/tf2/20200711/mask_rcnn_inception_resnet_v2_1024x1024_coco17_gpu-8.tar.gz)| - -**Supported Frozen Quantized Topologies** - -The topologies hosted on the TensorFlow\* Lite [site](https://www.tensorflow.org/lite/guide/hosted_models). The frozen model file (`.pb` file) should be fed to the Model Optimizer. - -| Model Name | Frozen Model File | -|:----------------------|---------------------------------------------------------------------------------------------------------------------------------:| -| Mobilenet V1 0.25 128 | [mobilenet_v1_0.25_128_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.25_128_quant.tgz) | -| Mobilenet V1 0.25 160 | [mobilenet_v1_0.25_160_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.25_160_quant.tgz) | -| Mobilenet V1 0.25 192 | [mobilenet_v1_0.25_192_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.25_192_quant.tgz) | -| Mobilenet V1 0.25 224 | [mobilenet_v1_0.25_224_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.25_224_quant.tgz) | -| Mobilenet V1 0.50 128 | [mobilenet_v1_0.5_128_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.5_128_quant.tgz) | -| Mobilenet V1 0.50 160 | [mobilenet_v1_0.5_160_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.5_160_quant.tgz) | -| Mobilenet V1 0.50 192 | [mobilenet_v1_0.5_192_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.5_192_quant.tgz) | -| Mobilenet V1 0.50 224 | 
[mobilenet_v1_0.5_224_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.5_224_quant.tgz) | -| Mobilenet V1 0.75 128 | [mobilenet_v1_0.75_128_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.75_128_quant.tgz) | -| Mobilenet V1 0.75 160 | [mobilenet_v1_0.75_160_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.75_160_quant.tgz) | -| Mobilenet V1 0.75 192 | [mobilenet_v1_0.75_192_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.75_192_quant.tgz) | -| Mobilenet V1 0.75 224 | [mobilenet_v1_0.75_224_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_0.75_224_quant.tgz) | -| Mobilenet V1 1.0 128 | [mobilenet_v1_1.0_128_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_128_quant.tgz) | -| Mobilenet V1 1.0 160 | [mobilenet_v1_1.0_160_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_160_quant.tgz) | -| Mobilenet V1 1.0 192 | [mobilenet_v1_1.0_192_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_192_quant.tgz) | -| Mobilenet V1 1.0 224 | [mobilenet_v1_1.0_224_quant.tgz](http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_224_quant.tgz) | -| Mobilenet V2 1.0 224 | [mobilenet_v2_1.0_224_quant.tgz](http://download.tensorflow.org/models/tflite_11_05_08/mobilenet_v2_1.0_224_quant.tgz) | -| Inception V1 | [inception_v1_224_quant_20181026.tgz](http://download.tensorflow.org/models/inception_v1_224_quant_20181026.tgz) | -| Inception V2 | [inception_v2_224_quant_20181026.tgz](http://download.tensorflow.org/models/inception_v2_224_quant_20181026.tgz) | -| Inception V3 | [inception_v3_quant.tgz](http://download.tensorflow.org/models/tflite_11_05_08/inception_v3_quant.tgz) | -| Inception V4 | 
[inception_v4_299_quant_20181026.tgz](http://download.tensorflow.org/models/inception_v4_299_quant_20181026.tgz) | - -It is necessary to specify the following command line parameters for the Model Optimizer to convert some of the models from the list above: `--input input --input_shape [1,HEIGHT,WIDTH,3]`. -Where `HEIGHT` and `WIDTH` are the input images height and width for which the model was trained. - -**Other supported topologies** - -| Model Name| Repository | -| :------------- | -----:| -| ResNext | [Repo](https://github.com/taki0112/ResNeXt-Tensorflow)| -| DenseNet | [Repo](https://github.com/taki0112/Densenet-Tensorflow)| -| CRNN | [Repo](https://github.com/MaybeShewill-CV/CRNN_Tensorflow) | -| NCF | [Repo](https://github.com/tensorflow/models/tree/master/official/recommendation) | -| lm_1b | [Repo](https://github.com/tensorflow/models/tree/master/research/lm_1b) | -| DeepSpeech | [Repo](https://github.com/mozilla/DeepSpeech) | -| A3C | [Repo](https://github.com/miyosuda/async_deep_reinforce) | -| VDCNN | [Repo](https://github.com/WenchenLi/VDCNN) | -| Unet | [Repo](https://github.com/kkweon/UNet-in-Tensorflow) | -| Keras-TCN | [Repo](https://github.com/philipperemy/keras-tcn) | -| PRNet | [Repo](https://github.com/YadiraF/PRNet) | -| YOLOv4 | [Repo](https://github.com/Ma-Dan/keras-yolo4) | -| STN | [Repo](https://github.com/oarriaga/STN.keras) | - -* YOLO topologies from DarkNet* can be converted using [these instructions](tf_specific/Convert_YOLO_From_Tensorflow.md). -* FaceNet topologies can be converted using [these instructions](tf_specific/Convert_FaceNet_From_Tensorflow.md). -* CRNN topologies can be converted using [these instructions](tf_specific/Convert_CRNN_From_Tensorflow.md). -* NCF topologies can be converted using [these instructions](tf_specific/Convert_NCF_From_Tensorflow.md). -* [GNMT](https://github.com/tensorflow/nmt) topology can be converted using [these instructions](tf_specific/Convert_GNMT_From_Tensorflow.md). 
-* [BERT](https://github.com/google-research/bert) topology can be converted using [these instructions](tf_specific/Convert_BERT_From_Tensorflow.md). -* [XLNet](https://github.com/zihangdai/xlnet) topology can be converted using [these instructions](tf_specific/Convert_XLNet_From_Tensorflow.md). -* [Attention OCR](https://github.com/emedvedev/attention-ocr) topology can be converted using [these instructions](tf_specific/Convert_AttentionOCR_From_Tensorflow.md). - - -## Loading Non-Frozen Models to the Model Optimizer - -There are three ways to store non-frozen TensorFlow models and load them to the Model Optimizer: - -1. Checkpoint: - - In this case, a model consists of two files: - - `inference_graph.pb` or `inference_graph.pbtxt` - - `checkpoint_file.ckpt` - - If you do not have an inference graph file, refer to [Freezing Custom Models in Python](#freeze-the-tensorflow-model). - - To convert such a TensorFlow model: - - 1. Go to the `/tools/model_optimizer` directory - 2. Run the `mo` script with the path to the checkpoint file to convert a model and an output directory where you have write permissions: - - * If input model is in `.pb` format:
```sh - mo --input_model .pb --input_checkpoint --output_dir -``` - * If input model is in `.pbtxt` format:
-```sh - mo --input_model .pbtxt --input_checkpoint --input_model_is_text --output_dir + mo --input_model .pb ``` -2. MetaGraph: +### Convert Non-Frozen Model Formats +There are three ways to store non-frozen TensorFlow models and convert them with the Model Optimizer: - In this case, a model consists of three or four files stored in the same directory: - - `model_name.meta` - - `model_name.index` - - `model_name.data-00000-of-00001` (digit part may vary) - - `checkpoint` (optional) - - To convert such TensorFlow model: - - 1. Go to the `/tools/model_optimizer` directory - 2. Run the `mo` script with a path to the MetaGraph `.meta` file and a writable output directory to convert a model:
+1. **Checkpoint**. In this case, a model consists of two files: `inference_graph.pb` (or `inference_graph.pbtxt`) and `checkpoint_file.ckpt`. +If you do not have an inference graph file, refer to [Freezing Custom Models in Python](#freeze-the-tensorflow-model). +To convert the model with the inference graph in `.pb` format, run the `mo` script with the path to the checkpoint file to convert a model: ```sh - mo --input_meta_graph .meta --output_dir + mo --input_model .pb --input_checkpoint +``` +To convert the model with the inference graph in `.pbtxt` format, run the `mo` script with the path to the checkpoint file to convert a model: +```sh + mo --input_model .pbtxt --input_checkpoint --input_model_is_text ``` -3. SavedModel format of TensorFlow 1.x and 2.x versions: - - In this case, a model consists of a special directory with a `.pb` file and several subfolders: `variables`, `assets`, and `assets.extra`. For more information about the SavedModel directory, refer to the [README](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/python/saved_model#components) file in the TensorFlow repository. - - To convert such TensorFlow model: - - 1. Go to the `/tools/model_optimizer` directory - 2. Run the `mo` script with a path to the SavedModel directory and a writable output directory to convert a model:
+2. **MetaGraph**. In this case, a model consists of three or four files stored in the same directory: `model_name.meta`, `model_name.index`, +`model_name.data-00000-of-00001` (digit part may vary), and `checkpoint` (optional). +To convert such a TensorFlow model, run the `mo` script with a path to the MetaGraph `.meta` file: ```sh - mo --input_meta_graph .meta --output_dir + mo --input_meta_graph .meta +``` + +3. **SavedModel format**. In this case, a model consists of a special directory with a `.pb` file +and several subfolders: `variables`, `assets`, and `assets.extra`. For more information about the SavedModel directory, refer to the [README](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/python/saved_model#components) file in the TensorFlow repository. +To convert such a TensorFlow model, run the `mo` script with a path to the SavedModel directory: +```sh + mo --saved_model_dir ``` You can convert TensorFlow 1.x SavedModel format in the environment that has a 1.x or 2.x version of TensorFlow. However, TensorFlow 2.x SavedModel format strictly requires the 2.x version of TensorFlow. @@ -249,12 +42,12 @@ If a model contains operations currently unsupported by OpenVINO, prune these op To determine custom input nodes, display a graph of the model in TensorBoard. To generate TensorBoard logs of the graph, use the `--tensorboard_logs` option. TensorFlow 2.x SavedModel format has a specific graph due to eager execution. In case of pruning, find custom input nodes in the `StatefulPartitionedCall/*` subgraph of TensorFlow 2.x SavedModel format. -## Freezing Custom Models in Python\* - +### Freezing Custom Models in Python\* When a network is defined in Python\* code, you have to create an inference graph file. Usually graphs are built in a form that allows model training. That means that all trainable parameters are represented as variables in the graph. To be able to use such graph with Model Optimizer such graph should be frozen.
The graph is frozen and dumped to a file with the following code: + ```python import tensorflow as tf from tensorflow.python.framework import graph_io @@ -273,126 +66,36 @@ Where: * `inference_graph.pb` is the name of the generated inference graph file. * `as_text` specifies whether the generated file should be in human readable text format or binary. -## Convert a TensorFlow* Model - -To convert a TensorFlow model: - -1. Go to the `/tools/model_optimizer` directory -2. Use the `mo` script to simply convert a model with the path to the input model `.pb` file and a writable output directory: -```sh - mo --input_model .pb --output_dir -``` - -Two groups of parameters are available to convert your model: - -* Framework-agnostic parameters are used to convert a model trained with any supported framework. For details, see see the General Conversion Parameters section on the [Converting a Model to Intermediate Representation (IR)](Converting_Model.md) page. -* [TensorFlow-specific parameters](#tensorflow_specific_conversion_params): Parameters used to convert only TensorFlow models. - -> **NOTE**: The color channel order (RGB or BGR) of an input data should match the channel order of the model training dataset. If they are different, perform the `RGB<->BGR` conversion specifying the command-line parameter: `--reverse_input_channels`. Otherwise, inference results may be incorrect. For more information about the parameter, refer to **When to Reverse Input Channels** section of [Converting a Model to Intermediate Representation (IR)](Converting_Model.md). - -### Using TensorFlow\*-Specific Conversion Parameters -The following list provides the TensorFlow\*-specific parameters. - -``` -TensorFlow*-specific parameters: - --input_model_is_text - TensorFlow*: treat the input model file as a text - protobuf format. If not specified, the Model Optimizer - treats it as a binary file by default. - --input_checkpoint INPUT_CHECKPOINT - TensorFlow*: variables file to load. 
- --input_meta_graph INPUT_META_GRAPH - Tensorflow*: a file with a meta-graph of the model - before freezing - --saved_model_dir SAVED_MODEL_DIR - TensorFlow*: directory with a model in SavedModel format - of TensorFlow 1.x or 2.x version - --saved_model_tags SAVED_MODEL_TAGS - Group of tag(s) of the MetaGraphDef to load, in string - format, separated by ','. For tag-set contains - multiple tags, all tags must be passed in. - --tensorflow_custom_operations_config_update TENSORFLOW_CUSTOM_OPERATIONS_CONFIG_UPDATE - TensorFlow*: update the configuration file with node - name patterns with input/output nodes information. - --tensorflow_object_detection_api_pipeline_config TENSORFLOW_OBJECT_DETECTION_API_PIPELINE_CONFIG - TensorFlow*: path to the pipeline configuration file - used to generate model created with help of Object - Detection API. - --tensorboard_logdir TENSORBOARD_LOGDIR - TensorFlow*: dump the input graph to a given directory - that should be used with TensorBoard. - --tensorflow_custom_layer_libraries TENSORFLOW_CUSTOM_LAYER_LIBRARIES - TensorFlow*: comma separated list of shared libraries - with TensorFlow* custom operations implementation. - --disable_nhwc_to_nchw - [DEPRECATED] Disables default translation from NHWC to NCHW. Since 2022.1 - this option is deprecated and used only to maintain backward compatibility - with previous releases. -``` - -> **NOTE**: Models produces with TensorFlow\* usually have not fully defined shapes (contain `-1` in some dimensions). It is necessary to pass explicit shape for the input using command line parameter `--input_shape` or `-b` to override just batch dimension. If the shape is fully defined, then there is no need to specify either `-b` or `--input_shape` options. 
- -#### Command-Line Interface (CLI) Examples Using TensorFlow\*-Specific Parameters - -* Launching the Model Optimizer for Inception V1 frozen model when model file is a plain text protobuf, specifying a writable output directory: -```sh - mo --input_model inception_v1.pbtxt --input_model_is_text -b 1 --output_dir -``` - -* Launching the Model Optimizer for Inception V1 frozen model and update custom sub-graph replacement file `transform.json` with information about input and output nodes of the matched sub-graph, specifying a writable output directory. For more information about this feature, refer to [Sub-Graph Replacement in the Model Optimizer](../customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md). -```sh - mo --input_model inception_v1.pb -b 1 --tensorflow_custom_operations_config_update transform.json --output_dir -``` - -* Launching the Model Optimizer for Inception V1 frozen model and use custom sub-graph replacement file `transform.json` for model conversion. For more information about this feature, refer to [Sub-Graph Replacement in the Model Optimizer](../customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md). -```sh - mo --input_model inception_v1.pb -b 1 --transformations_config transform.json --output_dir -``` - -* Launching the Model Optimizer for Inception V1 frozen model and dump information about the graph to TensorBoard log dir `/tmp/log_dir` -```sh - mo --input_model inception_v1.pb -b 1 --tensorboard_logdir /tmp/log_dir --output_dir -``` - -* Launching the Model Optimizer for a model with custom TensorFlow operations (refer to the [TensorFlow* documentation](https://www.tensorflow.org/extend/adding_an_op)) implemented in C++ and compiled into the shared library `my_custom_op.so`. Model Optimizer falls back to TensorFlow to infer output shape of operations implemented in the library if a custom TensorFlow operation library is provided. If it is not provided, a custom operation with an inference function is needed. 
For more information about custom operations, refer to the [Custom Layers Guide](../../../HOWTO/Custom_Layers_Guide.md). -```sh - mo --input_model custom_model.pb --tensorflow_custom_layer_libraries ./my_custom_op.so --output_dir -``` - - -## Convert TensorFlow* 2 Models - -In order to convert TensorFlow* 2 models, installation of dependencies from `requirements_tf.txt` is required. -TensorFlow* 2.X officially supports two model formats: SavedModel and Keras H5 (or HDF5). +## Convert TensorFlow 2 Models +To convert TensorFlow* 2 models, ensure that `openvino-dev[tensorflow2]` is installed via `pip`. +TensorFlow* 2.X officially supports two model formats: SavedModel and Keras H5 (or HDF5). Below are the instructions on how to convert each of them. -### SavedModel Format +### SavedModel Format +A model in the SavedModel format consists of a directory with a `saved_model.pb` file and two subfolders: `variables` and `assets`. +To convert such a model, run the `mo` script with a path to the SavedModel directory: -A model in the SavedModel format consists of a directory with a `saved_model.pb` file and two subfolders: `variables` and `assets`. -To convert such a model: -1. Go to the `/tools/model_optimizer` directory. -2. Run the `mo` script with a path to the SavedModel directory and a writable output directory: ```sh - mo --saved_model_dir --output_dir + mo --saved_model_dir ``` TensorFlow* 2 SavedModel format strictly requires the 2.x version of TensorFlow installed in the -environment for conversion to the Intermediate Representation (IR). +environment for conversion to the Intermediate Representation (IR). If a model contains operations currently unsupported by OpenVINO™, prune these operations by explicit specification of input nodes using the `--input` or `--output` -options. To determine custom input nodes, visualize a model graph in the TensorBoard. +options. To determine custom input nodes, visualize a model graph in the TensorBoard. 
To generate TensorBoard logs of the graph, use the Model Optimizer `--tensorboard_logs` command-line -option. +option. TensorFlow* 2 SavedModel format has a specific graph structure due to eager execution. In case of pruning, find custom input nodes in the `StatefulPartitionedCall/*` subgraph. -### Keras H5 - +### Keras H5 If you have a model in the HDF5 format, load the model using TensorFlow* 2 and serialize it in the SavedModel format. Here is an example of how to do it: + ```python import tensorflow as tf model = tf.keras.models.load_model('model.h5') @@ -401,6 +104,7 @@ tf.saved_model.save(model,'model') The Keras H5 model with a custom layer has specifics to be converted into SavedModel format. For example, the model with a custom layer `CustomLayer` from `custom_layer.py` is converted as follows: + ```python import tensorflow as tf from custom_layer import CustomLayer @@ -412,42 +116,39 @@ Then follow the above instructions for the SavedModel format. > **NOTE**: Do not use other hacks to resave TensorFlow* 2 models into TensorFlow* 1 formats. +## Command-Line Interface (CLI) Examples Using TensorFlow\*-Specific Parameters +* Launching the Model Optimizer for Inception V1 frozen model when model file is a plain text protobuf: -## Custom Layer Definition +```sh + mo --input_model inception_v1.pbtxt --input_model_is_text -b 1 +``` -Internally, when you run the Model Optimizer, it loads the model, goes through the topology, and tries to find each layer type in a list of known layers. Custom layers are layers that are not included in the list of known layers. If your topology contains any layers that are not in this list of known layers, the Model Optimizer classifies them as custom. 
+* Launching the Model Optimizer for Inception V1 frozen model and dump information about the graph to TensorBoard log dir `/tmp/log_dir` -See [Custom Layers in the Model Optimizer](../customize_model_optimizer/Customize_Model_Optimizer.md) for information about: +```sh + mo --input_model inception_v1.pb -b 1 --tensorboard_logdir /tmp/log_dir +``` -* Model Optimizer internal procedure for working with custom layers -* How to convert a TensorFlow model that has custom layers -* Custom layer implementation details +* Launching the Model Optimizer for BERT model in the SavedModel format, with three inputs. Explicitly specify input shapes +where the batch size and the sequence length equal 2 and 30 respectively. +```sh +mo --saved_model_dir BERT --input mask,word_ids,type_ids --input_shape [2,30],[2,30],[2,30] +``` ## Supported TensorFlow\* and TensorFlow 2 Keras\* Layers Refer to [Supported Framework Layers ](../Supported_Frameworks_Layers.md) for the list of supported standard layers. - ## Frequently Asked Questions (FAQ) - The Model Optimizer provides explanatory messages if it is unable to run to completion due to issues like typographical errors, incorrectly used options, or other issues. The message describes the potential cause of the problem and gives a link to the [Model Optimizer FAQ](../Model_Optimizer_FAQ.md). The FAQ has instructions on how to resolve most issues. The FAQ also includes links to relevant sections in the Model Optimizer Developer Guide to help you understand what went wrong. -## Video: Converting a TensorFlow Model - -@sphinxdirective - -.. 
raw:: html - - - -@endsphinxdirective - ## Summary In this document, you learned: -* Basic information about how the Model Optimizer works with TensorFlow\* models +* Basic information about how the Model Optimizer works with TensorFlow models * Which TensorFlow models are supported * How to freeze a TensorFlow model * How to convert a trained TensorFlow model using the Model Optimizer with both framework-agnostic and TensorFlow-specific command-line options + +## See Also +[Model Conversion Tutorials](Convert_Model_Tutorials.md) diff --git a/docs/MO_DG/prepare_model/convert_model/Convert_Model_Tutorials.md b/docs/MO_DG/prepare_model/convert_model/Convert_Model_Tutorials.md new file mode 100644 index 00000000000..15df7067e0e --- /dev/null +++ b/docs/MO_DG/prepare_model/convert_model/Convert_Model_Tutorials.md @@ -0,0 +1,44 @@ +# Model Conversion Tutorials {#openvino_docs_MO_DG_prepare_model_convert_model_tutorials} + +@sphinxdirective + +.. toctree:: + :maxdepth: 1 + :hidden: + + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_AttentionOCR_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_BERT_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_CRNN_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_DeepSpeech_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_EfficientDet_Models + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_FaceNet_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_GNMT_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_lm_1b_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_NCF_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Object_Detection_API_Models + 
openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_RetinaNet_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Slim_Library_Models + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_WideAndDeep_Family_Models + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_XLNet_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_YOLO_From_Tensorflow + openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_Faster_RCNN + openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_GPT2 + openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_Mask_RCNN + openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_Bert_ner + openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_Cascade_RCNN_res101 + openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_F3Net + openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_QuartzNet + openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_RCAN + openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_RNNT + openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_YOLACT + openvino_docs_MO_DG_prepare_model_convert_model_mxnet_specific_Convert_GluonCV_Models + openvino_docs_MO_DG_prepare_model_convert_model_mxnet_specific_Convert_Style_Transfer_From_MXNet + openvino_docs_MO_DG_prepare_model_convert_model_kaldi_specific_Aspire_Tdnn_Model + +@endsphinxdirective + +This section provides you with a set of tutorials that demonstrate conversion steps for specific TensorFlow, ONNX, PyTorch, MXNet, and Kaldi models. +It contains conversion recipes for concrete models, which do not necessarily cover your case. +Try to convert the model out-of-the-box, meaning only the `--input_model` parameter is specified in the command line, before studying the tutorials.
+ +You can also find a collection of [Python tutorials](../../../tutorials.md) written for running on Jupyter* notebooks that provide an introduction to the OpenVINO™ toolkit and explain how to use the Python API and tools for optimized deep learning inference. diff --git a/docs/MO_DG/prepare_model/convert_model/Converting_Model.md b/docs/MO_DG/prepare_model/convert_model/Converting_Model.md index b0fdd565f19..63da4bb204a 100644 --- a/docs/MO_DG/prepare_model/convert_model/Converting_Model.md +++ b/docs/MO_DG/prepare_model/convert_model/Converting_Model.md @@ -1,297 +1,75 @@ -# Converting a Model to Intermediate Representation (IR) {#openvino_docs_MO_DG_prepare_model_convert_model_Converting_Model} +# Setting Input Shapes {#openvino_docs_MO_DG_prepare_model_convert_model_Converting_Model} -@sphinxdirective +Model Optimizer provides the option of making models more efficient by providing additional shape definition. +It is achieved with two parameters: `--input_shape` and `--static_shape`, used under certain conditions. -.. toctree:: - :maxdepth: 1 - :hidden: +## When to Specify --input_shape Command-line Parameter +Model Optimizer supports conversion of models with dynamic input shapes that contain undefined dimensions. +However, if the shape of data is not going to change from one inference to another, +it is recommended to set up static shapes (when all dimensions are fully defined) for the inputs. +It can be beneficial from a performance perspective and memory consumption. +To set up static shapes, Model Optimizer provides the `--input_shape` parameter. +The same functionality is also available in runtime via `reshape` method, please refer to [Changing input shapes](../../../OV_Runtime_UG/ShapeInference.md). 
+For more information about dynamic shapes in runtime, refer to [Dynamic Shapes](../../../OV_Runtime_UG/ov_dynamic_shapes.md) - openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_TensorFlow - openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Caffe - openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_MxNet - openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Kaldi - openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_ONNX - openvino_docs_MO_DG_prepare_model_convert_model_Convert_Model_From_Paddle - openvino_docs_MO_DG_prepare_model_Model_Optimization_Techniques - openvino_docs_MO_DG_prepare_model_convert_model_Cutting_Model - openvino_docs_MO_DG_prepare_model_Supported_Frameworks_Layers - openvino_docs_MO_DG_prepare_model_convert_model_IR_suitable_for_INT8_inference - openvino_docs_MO_DG_prepare_model_customize_model_optimizer_Subgraph_Replacement_Model_Optimizer - openvino_docs_MO_DG_prepare_model_convert_model_Legacy_IR_Layers_Catalog_Spec +OpenVINO Runtime API can have limitations to infer models with undefined dimensions on some hardware. +In this case, the `--input_shape` parameter and the `reshape` method can help to resolve undefined dimensions. -@endsphinxdirective +Sometimes Model Optimizer is unable to convert models out-of-the-box (only the `--input_model` parameter is specified). +Such problems can be related to models with inputs of undefined ranks and a case of cutting off parts of a model. +In this case, the user has to specify input shapes explicitly using the `--input_shape` parameter. -To convert the model to the Intermediate Representation (IR), run Model Optimizer using the following command: +For example, run the Model Optimizer for the TensorFlow* MobileNet model with the single input +and specify input shape `[2,300,300,3]`.
```sh -mo --input_model INPUT_MODEL --output_dir +mo --input_model MobileNet.pb --input_shape [2,300,300,3] ``` -The output directory must have write permissions, so you can run Model Optimizer from the output directory or specify an output path with the `--output_dir` option. - -> **NOTE**: The color channel order (RGB or BGR) of an input data should match the channel order of the model training dataset. If they are different, perform the `RGB<->BGR` conversion specifying the command-line parameter: `--reverse_input_channels`. Otherwise, inference results may be incorrect. For details, refer to [When to Reverse Input Channels](#when_to_reverse_input_channels). - -To adjust the conversion process, you may use general parameters defined in the [General Conversion Parameters](#general_conversion_parameters) and -Framework-specific parameters for: -* [Caffe](Convert_Model_From_Caffe.md) -* [TensorFlow](Convert_Model_From_TensorFlow.md) -* [MXNet](Convert_Model_From_MxNet.md) -* [ONNX](Convert_Model_From_ONNX.md) -* [Kaldi](Convert_Model_From_Kaldi.md) - - -## General Conversion Parameters - -To adjust the conversion process, you can also use the general (framework-agnostic) parameters: +If a model has multiple inputs, `--input_shape` must be used in conjunction with `--input` parameter. +The parameter `--input` contains a list of input names for which shapes in the same order are defined via `--input_shape`. +For example, launch the Model Optimizer for the ONNX* OCR model with a pair of inputs `data` and `seq_len` +and specify shapes `[3,150,200,1]` and `[3]` for them. ```sh -optional arguments: - -h, --help show this help message and exit - --framework {tf,caffe,mxnet,kaldi,onnx} - Name of the framework used to train the input model. - -Framework-agnostic parameters: - --input_model INPUT_MODEL, -w INPUT_MODEL, -m INPUT_MODEL - Tensorflow*: a file with a pre-trained model (binary - or text .pb file after freezing). 
Caffe*: a model - proto file with model weights - --model_name MODEL_NAME, -n MODEL_NAME - Model_name parameter passed to the final create_ir - transform. This parameter is used to name a network in - a generated IR and output .xml/.bin files. - --output_dir OUTPUT_DIR, -o OUTPUT_DIR - Directory that stores the generated IR. By default, it - is the directory from where the Model Optimizer is - launched. - --input_shape INPUT_SHAPE - Input shape(s) that should be fed to an input node(s) - of the model. Shape is defined as a comma-separated - list of integer numbers enclosed in parentheses or - square brackets, for example [1,3,227,227] or - (1,227,227,3), where the order of dimensions depends - on the framework input layout of the model. For - example, [N,C,H,W] is used for Caffe* models and - [N,H,W,C] for TensorFlow* models. Model Optimizer - performs necessary transformations to convert the - shape to the layout required by Inference Engine - (N,C,H,W). The shape should not contain undefined - dimensions (? or -1) and should fit the dimensions - defined in the input operation of the graph. Boundaries - of undefined dimension can be specified with ellipsis, - for example [1,1..10,128,128]. One boundary can be undefined, - for example [1,..100] or [1,3,1..,1..]. If there - are multiple inputs in the model, --input_shape should - contain definition of shape for each input separated - by a comma, for example: [1,3,227,227],[2,4] for a - model with two inputs with 4D and 2D shapes. - Alternatively, specify shapes with the --input - option. - --scale SCALE, -s SCALE - All input values coming from original network inputs - will be divided by this value. When a list of inputs - is overridden by the --input parameter, this scale is - not applied for any input that does not match with the - original input of the model. - If both --mean and --scale are specified, - the mean is subtracted first and then scale is applied - regardless of the order of options in command line. 
- --reverse_input_channels - Switch the input channels order from RGB to BGR (or - vice versa). Applied to original inputs of the model - if and only if a number of channels equals 3. - When --mean_values/--scale_values are also specified, - reversing of channels will be applied to user's input - data first, so that numbers in --mean_values and - --scale_values go in the order of channels used in - the original model. In other words, if both options are - specified then the data flow in the model looks as following: - Parameter -> ReverseInputChannels -> Mean/Scale apply -> the original body of the model. - --log_level {CRITICAL,ERROR,WARN,WARNING,INFO,DEBUG,NOTSET} - Logger level - --input INPUT Quoted list of comma-separated input nodes names with shapes, - data types, and values for freezing. The order of inputs in converted - model is the same as order of specified operation names. The shape and value are - specified as space-separated lists. The data type of input - node is specified in braces and can have one of the values: - f64 (float64), f32 (float32), f16 (float16), i64 (int64), - i32 (int32), u8 (uint8), boolean (bool). Data type is optional. - If it's not specified explicitly then there are two options: - if input node is a parameter, data type is taken from the - original node dtype, if input node is not a parameter, data type - is set to f32. Example, to set `input_1` with shape [1 100], - and Parameter node `sequence_len` with scalar input with value `150`, - and boolean input `is_training` with `False` value use the - following format: "input_1[1 10],sequence_len->150,is_training->False". - Another example, use the following format to set input port 0 - of the node `node_name1` with the shape [3 4] as an input node - and freeze output port 1 of the node `node_name2` with the - value [20 15] of the int32 type and shape [2]: - "0:node_name1[3 4],node_name2:1[2]{i32}->[20 15]". - --output OUTPUT The name of the output operation of the model. 
For - TensorFlow*, do not add :0 to this name. - The order of outputs in converted model is the same as order of - specified operation names. - --mean_values MEAN_VALUES, -ms MEAN_VALUES - Mean values to be used for the input image per - channel. Values to be provided in the (R,G,B) or - [R,G,B] format. Can be defined for desired input of - the model, for example: "--mean_values - data[255,255,255],info[255,255,255]". The exact - meaning and order of channels depend on how the - original model was trained. - --scale_values SCALE_VALUES - Scale values to be used for the input image per - channel. Values are provided in the (R,G,B) or [R,G,B] - format. Can be defined for desired input of the model, - for example: "--scale_values - data[255,255,255],info[255,255,255]". The exact - meaning and order of channels depend on how the - original model was trained. - If both --mean_values and --scale_values are specified, - the mean is subtracted first and then scale is applied - regardless of the order of options in command line. - --data_type {FP16,FP32,half,float} - Data type for all intermediate tensors and weights. If - original model is in FP32 and --data_type=FP16 is - specified, all model weights and biases are compressed - to FP16. - --disable_fusing Turn off fusing of linear operations to Convolution - --disable_resnet_optimization - Turn off resnet optimization - --finegrain_fusing FINEGRAIN_FUSING - Regex for layers/operations that won't be fused. - Example: --finegrain_fusing Convolution1,.*Scale.* - --disable_gfusing Turn off fusing of grouped convolutions - --enable_concat_optimization - Turn on Concat optimization. - --extensions EXTENSIONS - Directory or a comma separated list of directories - with extensions. To disable all extensions including - those that are placed at the default location, pass an - empty string. 
- --batch BATCH, -b BATCH - Input batch size - --version Version of Model Optimizer - --silent Prevent any output messages except those that - correspond to log level equals ERROR, that can be set - with the following option: --log_level. By default, - log level is already ERROR. - --freeze_placeholder_with_value FREEZE_PLACEHOLDER_WITH_VALUE - Replaces input layer with constant node with provided - value, for example: "node_name->True". It will be - DEPRECATED in future releases. Use --input option to - specify a value for freezing. - --static_shape Enables IR generation for fixed input shape (folding - `ShapeOf` operations and shape-calculating sub-graphs - to `Constant`). Changing model input shape using - the Inference Engine API in runtime may fail for such an IR. - --disable_weights_compression - Disable compression and store weights with original - precision. - --progress Enable model conversion progress display. - --stream_output Switch model conversion progress display to a - multiline mode. - --transformations_config TRANSFORMATIONS_CONFIG - Use the configuration file with transformations - description. - --use_new_frontend Force the usage of new frontend API for model processing. - --use_legacy_frontend Force the usage of legacy API for model processing. +mo --input_model ocr.onnx --input data,seq_len --input_shape [3,150,200,1],[3] ``` -The sections below provide details on using particular parameters and examples of CLI commands. +The alternative way to specify input shapes is to use the `--input` parameter as follows: -## When to Specify Mean and Scale Values -Usually neural network models are trained with the normalized input data. This means that the input data values are converted to be in a specific range, for example, `[0, 1]` or `[-1, 1]`. Sometimes the mean values (mean images) are subtracted from the input data values as part of the pre-processing. There are two cases how the input data pre-processing is implemented. 
- * The input pre-processing operations are a part of a topology. In this case, the application that uses the framework to infer the topology does not pre-process the input. - * The input pre-processing operations are not a part of a topology and the pre-processing is performed within the application which feeds the model with an input data. - -In the first case, the Model Optimizer generates the IR with required pre-processing layers and Inference Engine samples may be used to infer the model. - -In the second case, information about mean/scale values should be provided to the Model Optimizer to embed it to the generated IR. Model Optimizer provides a number of command line parameters to specify them: `--mean`, `--scale`, `--scale_values`, `--mean_values`. - -> **NOTE:** If both mean and scale values are specified, the mean is subtracted first and then scale is applied regardless of the order of options in command line. Input values are *divided* by the scale value(s). If also `--reverse_input_channels` option is used, the reverse_input_channels will be applied first, then mean and after that scale. - -There is no a universal recipe for determining the mean/scale values for a particular model. The steps below could help to determine them: -* Read the model documentation. Usually the documentation describes mean/scale value if the pre-processing is required. -* Open the example script/application executing the model and track how the input data is read and passed to the framework. -* Open the model in a visualization tool and check for layers performing subtraction or multiplication (like `Sub`, `Mul`, `ScaleShift`, `Eltwise` etc) of the input data. If such layers exist, pre-processing is probably part of the model. - -## When to Specify Input Shapes -There are situations when the input data shape for the model is not fixed, like for the fully-convolutional neural networks. 
In this case, for example, TensorFlow\* models contain `-1` values in the `shape` attribute of the `Placeholder` operation. Inference Engine does not support input layers with undefined size, so if the input shapes are not defined in the model, the Model Optimizer fails to convert the model. The solution is to provide the input shape(s) using the `--input` or `--input_shape` command line parameter for all input(s) of the model or provide the batch size using the `-b` command line parameter if the model contains just one input with undefined batch size only. In the latter case, the `Placeholder` shape for the TensorFlow\* model looks like this `[-1, 224, 224, 3]`. - -## When to Reverse Input Channels -Input data for your application can be of RGB or BRG color input order. For example, Inference Engine samples load input images in the BGR channels order. However, the model may be trained on images loaded with the opposite order (for example, most TensorFlow\* models are trained with images in RGB order). In this case, inference results using the Inference Engine samples may be incorrect. The solution is to provide `--reverse_input_channels` command line parameter. Taking this parameter, the Model Optimizer performs first convolution or other channel dependent operation weights modification so these operations output will be like the image is passed with RGB channels order. - -## When to Specify `--static_shape` Command Line Parameter -If the `--static_shape` command line parameter is specified the Model Optimizer evaluates shapes of all operations in the model (shape propagation) for a fixed input(s) shape(s). During the shape propagation the Model Optimizer evaluates operations *Shape* and removes them from the computation graph. With that approach, the initial model which can consume inputs of different shapes may be converted to IR working with the input of one fixed shape only. 
For example, consider the case when some blob is reshaped from 4D of a shape *[N, C, H, W]* to a shape *[N, C, H \* W]*. During the model conversion the Model Optimize calculates output shape as a constant 1D blob with values *[N, C, H \* W]*. So if the input shape changes to some other value *[N,C,H1,W1]* (it is possible scenario for a fully convolutional model) then the reshape layer becomes invalid. -Resulting Intermediate Representation will not be resizable with the help of Inference Engine. - -## Examples of CLI Commands - -Launch the Model Optimizer for the Caffe bvlc_alexnet model with debug log level: ```sh -mo --input_model bvlc_alexnet.caffemodel --log_level DEBUG --output_dir +mo --input_model ocr.onnx --input data[3 150 200 1],seq_len[3] ``` -Launch the Model Optimizer for the Caffe bvlc_alexnet model with the output IR called `result.*` in the specified `output_dir`: +The parameter `--input_shape` allows overriding original input shapes to the shapes compatible with a given model. +Dynamic shapes, i.e. with dynamic dimensions, in the original model can be replaced with static shapes for the converted model, and vice versa. +The dynamic dimension can be marked in Model Optimizer command-line as `-1` or `?`. +For example, launch the Model Optimizer for the ONNX* OCR model and specify a dynamic batch dimension for inputs. + ```sh -mo --input_model bvlc_alexnet.caffemodel --model_name result --output_dir /../../models/ +mo --input_model ocr.onnx --input data,seq_len --input_shape [-1,150,200,1],[-1] ``` -Launch the Model Optimizer for the Caffe bvlc_alexnet model with one input with scale values: +To optimize memory consumption for models with undefined dimensions in run-time, Model Optimizer provides the capability to define boundaries of dimensions. +The boundaries of an undefined dimension can be specified with an ellipsis. +For example, launch the Model Optimizer for the ONNX* OCR model and specify a boundary for the batch dimension.
+ ```sh -mo --input_model bvlc_alexnet.caffemodel --scale_values [59,59,59] --output_dir +mo --input_model ocr.onnx --input data,seq_len --input_shape [1..3,150,200,1],[1..3] ``` -Launch the Model Optimizer for the Caffe bvlc_alexnet model with multiple inputs with scale values: -```sh -mo --input_model bvlc_alexnet.caffemodel --input data,rois --scale_values [59,59,59],[5,5,5] --output_dir -``` +## When to Specify --static_shape Command-line Parameter +Model Optimizer provides the `--static_shape` parameter that allows evaluating shapes of all operations in the model for fixed input shapes +and folding shape-computing sub-graphs into constants. The resulting IR can be more compact in size and the loading time for such IR can be decreased. +However, the resulting IR will not be reshape-able with the help of the `reshape` method from the OpenVINO Runtime API. +It is worth noting that the `--input_shape` parameter does not affect reshape-ability of the model. -Launch the Model Optimizer for the Caffe bvlc_alexnet model with multiple inputs with scale and mean values specified for the particular nodes: -```sh -mo --input_model bvlc_alexnet.caffemodel --input data,rois --mean_values data[59,59,59] --scale_values rois[5,5,5] --output_dir -``` +For example, launch the Model Optimizer for the ONNX* OCR model using `--static_shape`.
-Launch the Model Optimizer for the Caffe bvlc_alexnet model with specified input layer, overridden input shape, scale 5, batch 8 and specified name of an output operation: ```sh -mo --input_model bvlc_alexnet.caffemodel --input "data[1 3 224 224]" --output pool5 -s 5 -b 8 --output_dir +mo --input_model ocr.onnx --input data[3 150 200 1],seq_len[3] --static_shape ``` -Launch the Model Optimizer for the Caffe bvlc_alexnet model with disabled fusing for linear operations to Convolution and grouped convolutions: -```sh -mo --input_model bvlc_alexnet.caffemodel --disable_fusing --disable_gfusing --output_dir -``` - -Launch the Model Optimizer for the Caffe bvlc_alexnet model with reversed input channels order between RGB and BGR, specified mean values to be used for the input image per channel and specified data type for input tensor values: -```sh -mo --input_model bvlc_alexnet.caffemodel --reverse_input_channels --mean_values [255,255,255] --data_type FP16 --output_dir -``` - -Launch the Model Optimizer for the Caffe bvlc_alexnet model with extensions listed in specified directories, specified mean_images binaryproto - file. For more information about extensions, please refer to the [Custom Layers Guide](../../../HOWTO/Custom_Layers_Guide.md). -```sh -mo --input_model bvlc_alexnet.caffemodel --extensions /home/,/some/other/path/ --mean_file /path/to/binaryproto --output_dir -``` - -Launch the Model Optimizer for TensorFlow* FaceNet* model with a placeholder freezing value. -It replaces the placeholder with a constant layer that contains the passed value. -For more information about FaceNet conversion, please refer to [this](tf_specific/Convert_FaceNet_From_Tensorflow.md) page. -```sh -mo --input_model FaceNet.pb --input "phase_train->False" --output_dir -``` -Launch the Model Optimizer for any model with a placeholder freezing tensor of values. -It replaces the placeholder with a constant layer that contains the passed values. 
- -Tensor here is represented in square brackets with each value separated from another by a whitespace. -If data type is set in the model, this tensor will be reshaped to a placeholder shape and casted to placeholder data type. -Otherwise, it will be casted to data type passed to `--data_type` parameter (by default, it is FP32). -```sh -mo --input_model FaceNet.pb --input "placeholder_layer_name->[0.1 1.2 2.3]" --output_dir -``` - ## See Also -* [Configuring the Model Optimizer](../../Deep_Learning_Model_Optimizer_DevGuide.md) -* [IR Notation Reference](../../IR_and_opsets.md) -* [Model Optimizer Extensibility](../customize_model_optimizer/Customize_Model_Optimizer.md) -* [Model Cutting](Cutting_Model.md) +* [Introduction](../../Deep_Learning_Model_Optimizer_DevGuide.md) +* [Cutting Off Parts of a Model](Cutting_Model.md) diff --git a/docs/MO_DG/prepare_model/convert_model/Cutting_Model.md b/docs/MO_DG/prepare_model/convert_model/Cutting_Model.md index aac7d73f006..8b22503439e 100644 --- a/docs/MO_DG/prepare_model/convert_model/Cutting_Model.md +++ b/docs/MO_DG/prepare_model/convert_model/Cutting_Model.md @@ -6,10 +6,10 @@ Sometimes some parts of a model must be removed while the Model Optimizer is con The following examples are the situations when model cutting is useful or even required: -* model has pre- or post-processing parts that cannot be translated to existing Inference Engine layers. +* model has pre- or post-processing parts that cannot be translated to existing OpenVINO operations. * model has a training part that is convenient to be kept in the model, but not used during inference. * model is too complex (contains lots of unsupported operations that cannot be easily implemented as custom layers), so the complete model cannot be converted in one shot. -* problem with model conversion in the Model Optimizer or inference in the Inference Engine occurred. 
To localize the issue, limit the scope for conversion by iteratively searching for problematic places in the model. +* problem with model conversion in the Model Optimizer or inference in the OpenVINO Runtime occurred. To localize the issue, limit the scope for conversion by iteratively searching for problematic places in the model. * single custom layer or a combination of custom layers is isolated for debugging purposes. ## Command-Line Options diff --git a/docs/MO_DG/prepare_model/convert_model/IR_suitable_for_INT8_inference.md b/docs/MO_DG/prepare_model/convert_model/IR_suitable_for_INT8_inference.md index 34df1408cdb..9f7ac357e30 100644 --- a/docs/MO_DG/prepare_model/convert_model/IR_suitable_for_INT8_inference.md +++ b/docs/MO_DG/prepare_model/convert_model/IR_suitable_for_INT8_inference.md @@ -2,7 +2,7 @@ ## Introduction -Inference Engine CPU and GPU plugin can infer models in the low precision. +OpenVINO Runtime CPU and GPU devices can infer models in low precision. For details, refer to [Low Precision Inference on the CPU](../../../OV_Runtime_UG/Int8Inference.md). Intermediate Representation (IR) should be specifically formed to be suitable for low precision inference. diff --git a/docs/MO_DG/prepare_model/convert_model/Legacy_IR_Layers_Catalog_Spec.md b/docs/MO_DG/prepare_model/convert_model/Legacy_IR_Layers_Catalog_Spec.md deleted file mode 100644 index b94e2b5d331..00000000000 --- a/docs/MO_DG/prepare_model/convert_model/Legacy_IR_Layers_Catalog_Spec.md +++ /dev/null @@ -1,5319 +0,0 @@ -# [DEPRECATED] Intermediate Representation Notation Reference Catalog {#openvino_docs_MO_DG_prepare_model_convert_model_Legacy_IR_Layers_Catalog_Spec} - -> **NOTE**: This IR Notation Reference is no longer supported since the new concept of operation sets is introduced in OpenVINO 2020.1 version. For a complete list of supported operations, see the [Intermediate Representation and Operation Sets](../../IR_and_opsets.md) topic.
- -## Table of Сontents - -* Activation Layer -* ArgMax Layer -* BatchNormalization Layer -* BinaryConvolution Layer -* Bucketize Layer -* Broadcast Layer -* Clamp Layer -* Concat Layer -* Const Layer -* Convolution Layer -* Crop (Type 1) Layer -* Crop (Type 2) Layer -* Crop (Type 3) Layer -* CTCGreadyDecoder Layer -* Deconvolution Layer -* DeformableConvolution Layer -* DepthToSpace Layer -* DetectionOutput Layer -* Erf Layer -* Eltwise Layer -* Fill Layer -* Flatten Layer -* FullyConnected Layer -* Gather Layer -* GRN Layer -* GRUCell Layer -* Input Layer -* Interp Layer -* LSTMCell Layer -* Memory Layer -* MVN Layer -* NonMaxSuppression Layer -* Norm Layer -* Normalize Layer -* OneHot Layer -* Pad Layer -* Permute Layer -* Pooling Layer -* Power Layer -* PReLU Layer -* PriorBox Layer -* PriorBoxClustered Layer -* Proposal Layer -* PSROIPooling Layer -* FakeQuantize Layer -* Range Layer -* RegionYolo Layer -* ReLU Layer -* ReorgYolo Layer -* Resample (Type 1) Layer -* Resample (Type 2) Layer -* Reshape Layer -* ReverseSequence Layer -* RNNCell Layer -* ROIPooling Layer -* ExperimentalDetectronROIFeatureExtractor layer -* ExperimentalSparseWeightedSum layer -* ScaleShift Layer -* Select Layer -* Shape Layer -* ShuffleChannels Layer -* SimplerNMS Layer -* Slice Layer -* SoftMax Layer -* SparseFillEmptyRows Layer -* SparseSegmentMean Layer -* SparseSegmentSqrtN Layer -* SparseSegmentSum Layer -* SparseToDense Layer -* Split Layer -* Squeeze Layer -* StridedSlice Layer -* TensorIterator Layer -* Tile Layer -* TopK Layer -* Unique Layer -* Unsqueeze Layer - -## Activation Layer -Back to top - -**Name**: *Activation* - -**Category**: *Activation* - -**Short description**: *Activation* layer represents an activation function of each neuron in a layer, which is used to add non-linearity to the computational flow. 
- -**Detailed description**: [Reference](https://medium.com/the-theory-of-everything/understanding-activation-functions-in-neural-networks-9491262884e0) - -**Parameters**: *Activation* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *type* - - * **Description**: *type* represents particular activation function. For example, *type* equal to `sigmoid` means that the neurons of this layer have a sigmoid activation function. - * **Range of values**: - * *sigmoid* - sigmoid activation function. Learn more from the **Detailed description** section. - * *tanh* - tanh activation function. Learn more from the **Detailed description** section. - * *elu* - elu activation function. Learn more from the **Detailed description** section. - * *relu6* - relu6 activation function - * *not* - logical NOT function - * *exp* - exponent function - * **Type**: `string` - * **Default value**: None - * **Required**: *yes* - -**Mathematical Formulation** - -* Sigmoid function: - \f[ - f(x) = \frac{1}{1+e^{-x}} - \f] -* Tahn function: - \f[ - f (x) = \frac{2}{1+e^{-2x}} - 1 = 2sigmoid(2x) - 1 - \f] -* Elu function: - \f[ - f(x) = \left\{\begin{array}{ll} - e^{x} - 1 \quad \mbox{if } x < 0 \\ - x \quad \mbox{if } x \geq 0 - \end{array}\right. - \f] -* Relu6 function: - \f[ - f(x) = min(max(0, x), 6) - \f] - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## ArgMax Layer -Back to top - -**Name**: *ArgMax* - -**Category**: *Layer* - -**Short description**: *ArgMax* layer computes indexes and values of the *top_k* maximum values for each datum across all dimensions *CxHxW*. - -**Detailed description**: *ArgMax* layer is used after a classification layer to produce a prediction. If the parameter *out_max_val* is 1, output is a vector of pairs `(max_ind, max_val)` for each batch. The *axis* parameter specifies an axis along which to maximize. 
- -**Parameters**: *ArgMax* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *out_max_val* - - * **Description**: If *out_max_val* is 1, the output is a list of pairs `(max_ind, max_val)`. If *out_max_val* is 0, the output is a list of indexes of size *top_k*. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *top_k* - - * **Description**: *top_k* is the number of elements to save in output. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *axis* - - * **Description**: If *axis* is set, maximizes along the specified axis, else maximizes the flattened trailing dimensions for each index of the first / num dimension. - * **Range of values**: an integer. Negative value means counting dimension from the end. - * **Type**: `int` - * **Default value**: None - * **Required**: *no* - -**Inputs**: - -* **1**: 4D input blob. Required. - -**Mathematical Formulation** - -*ArgMax* generally does the following with the input blobs: -\f[ -o_{i} = \left\{ -x| x \in S \wedge \forall y \in S : f(y) \leq f(x) -\right\} -\f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## BatchNormalization Layer -Back to top - -**Name**: *BatchNormalization* - -**Category**: *Normalization* - -**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/batchnorm.html) - -**Detailed description**: [Reference](https://kratzert.github.io/2016/02/12/understanding-the-gradient-flow-through-the-batch-normalization-layer.html) - -**Parameters**: *BatchNormalization* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *epsilon* - - * **Description**: *epsilon* is the number to be added to the variance to avoid division by zero when normalizing a value. 
For example, *epsilon* equal to 0.001 means that 0.001 is added to the variance. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: 4D input blob. Required. - -**Mathematical Formulation** - -*BatchNormalization* normalizes the output in each hidden layer. -* **Input**: Values of \f$x\f$ over a mini-batch: - \f[ - \beta = \{ x_{1...m} \} - \f] -* **Parameters to learn**: \f$ \gamma, \beta\f$ -* **Output**: - \f[ - \{ o_{i} = BN_{\gamma, \beta} ( b_{i} ) \} - \f] -* **Mini-batch mean**: - \f[ - \mu_{\beta} \leftarrow \frac{1}{m}\sum_{i=1}^{m}b_{i} - \f] -* **Mini-batch variance**: - \f[ - \sigma_{\beta }^{2}\leftarrow \frac{1}{m}\sum_{i=1}^{m} ( b_{i} - \mu_{\beta} )^{2} - \f] -* **Normalize**: - \f[ - \hat{b_{i}} \leftarrow \frac{b_{i} - \mu_{\beta}}{\sqrt{\sigma_{\beta }^{2} + \epsilon }} - \f] -* **Scale and shift**: - \f[ - o_{i} \leftarrow \gamma\hat{b_{i}} + \beta = BN_{\gamma ,\beta } ( b_{i} ) - \f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - - -## BinaryConvolution Layer -Back to top - -**Name**: *BinaryConvolution* - -**Category**: *Layer* - -**Short description**: *BinaryConvolution* convolution with binary weights - -**Parameters**: *BinaryConvolution* layer parameters are specified in the `data` node, which is a child of the `layer` node. The layer has the same parameters as a regular *Convolution* layer and several unique parameters. - -* **Parameter name**: *input* - - * **Description**: *input* is the number of input channels. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *mode* - - * **Description**: *mode* defines how input tensor 0/1 values and weights 0/1 are interpreted as real numbers and how the result is computed. 
- * **Range of values**: - * *xnor-popcount* - * **Type**: `string` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *pad_value* - - * **Description**: *pad_value* is a floating-point value used to fill pad area. - * **Range of values**: a floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: 4D input blob containing integer or floats; filled with 0/1 values. 0 means -1, 1 means 1 for `mode="xnor-popcount"`. Required. - -* * * - -## Bucketize Layer -Back to top - -**Name**: *Bucketize* - -**Category**: *Layer* - -**Short description**: *Bucketize* bucketizes the input based on boundaries. This is an equivalent to np.digitize. - -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/bucketize) - -* **Parameter name**: *with_right_bound* - - * **Description**: Indicates whether the intervals include the right or the left bucket edge. - * **Range of values**: *True* or *False* - * **Type**: `bool` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: N-D tensor. Input tensor for the bucketization. It contains with float or integer types. Required. -* **2**: 1-D tensor. Sorted boundaries of the buckets. It contains with a float type. Required. - -**Outputs**: - -* **1**: Output tensor with bucket indices for each element of the first input tensor. If the second input is empty, the bucket indice for all elements is equal to 0. The output tensor shape is the same as the first input tensor shape. - -* * * - -## Clamp Layer -Back to top - -**Name**: *Clamp* - -**Category**: *Layer* - -**Short description**: *Clamp* layer represents clipping activation operation. - -**Detailed description**: [Reference](https://www.tensorflow.org/versions/r1.2/api_docs/MO_DG/prepare_model/python/tf/clip_by_value) - -**Parameters**: *Clamp* layer parameters are specified in the `data` node, which is a child of the `layer` node. 
- -* **Parameter name**: *min* - - * **Description**: *min* is the lower bound of values in the output. Any value in the input that is smaller than the bound is replaced by the *min* value. For example, *min* equal to 10.0 means that any value in the input that is smaller than the bound is replaced by 10.0. - * **Range of values**: a non-negative floating-point number - * **Type**: `float` - * **Default value**: 0.0 - * **Required**: *yes* - -* **Parameter name**: *max* - - * **Description**: *max* is the upper bound of values in the output. Any value in the input that is greater than the bound, is replaced by the *max* value. For example, *max* equal to 50.0 means that any value in the input that is greater than the bound is replaced by 50.0. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: 6.0 - * **Required**: *yes* - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -**Mathematical Formulation** - -*Clamp* generally does the following with the input blobs: -\f[ -out_i=\left\{\begin{array}{ll} - max\_value \quad \mbox{if } \quad input_i>max\_value \\ - min\_value \quad \mbox{if } \quad input_i -\end{array}\right. -\f] - -**Example** - -```xml - - - ... - ... - -``` - - -* * * - -## Broadcast -Back to top - -**Category**: Layer - -**Short description**: *Broadcast* replicates data on the first input to fit a given shape. - -**Detailed description**: - -*Broadcast* takes the first tensor and, following the [NumPy broadcasting rules specification](https://docs.scipy.org/doc/numpy/user/basics.broadcasting.html), builds a new tensor with shape matching the second input tensor. The second input value represents desired output shape. - -**Parameters**: *Broadcast* layer does not have parameters. - -**Inputs**: - -* **1**: Source tensor that is being broadcasted. Required. - -* **2**: 1D tensor describing output shape. Required. 
- - -**Outputs**: - -* **1**: Output tensor with replicated content from the first tensor with shape defined by the second input. - -**Example** - -```xml - - - - 16 - 1 - 1 - - - 4 - - - - - 1 - 16 - 50 - 50 - - - -``` - -* * * - - -## Concat Layer -Back to top - -**Name**: *Concat* - -**Category**: *Layer* - -**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/concat.html) - -**Parameters**: *Concat* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* is the number of axis over which input blobs are concatenated. For example, *axis* equal to 1 means that input blobs are concatenated over the first axis. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *yes* - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -* **2**: Multidimensional input blob. Required. - -**Mathematical Formulation** - -*Axis* parameter specifies a blob dimension to concatenate values over. For example, for two input blobs *B1xC1xH1xW1* and *B2xC2xH2xW2*, if `axis="1"`, the output blob is *B1xC1+C2xH1xW1*. This is only possible if *B1=B2*, *H1=H2*, *W1=W2*. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Const Layer -Back to top - -**Name**: *Const* - -**Category**: *Layer* - -**Short description**: *Const* layer produces a blob with a constant value specified in the *blobs* section. - -**Parameters**: *Const* layer does not have parameters. 
- -**Example** - -```xml - - - - 3 - 100 - - - - - - -``` - -* * * - -## Convolution Layer -Back to top - -**Name**: *Convolution* - -**Category**: *Layer* - -**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/convolution.html) - -**Detailed description**: [Reference](http://cs231n.github.io/convolutional-networks/#conv) - -**Parameters**: *Convolution* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *strides* - - * **Description**: *strides* is a distance (in pixels) to slide the filter on the feature map over the `(z, y, x)` axes for 3D convolutions and `(y, x)` axes for 2D convolutions. For example, *strides* equal to "4,2,1" means sliding the filter four pixels at a time over depth dimension, two pixels over height dimension, and one pixel over width dimension. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 1 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *pads_begin* - - * **Description**: *pads_begin* is the number of pixels to add to the beginning of each axis. For example, *pads_begin* equal to "1,2" means adding one pixel to the top of the input and two pixels to the left of the input. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 0 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *pads_end* - - * **Description**: *pads_end* is the number of pixels to add to the end of each axis. For example, *pads_end* equal to "1,2" means adding one pixel to the bottom of the input and two pixels to the right of the input. 
- * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 0 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *kernel* - - * **Description**: *kernel* is a size of each filter. For example, *kernel* equal to "2,3" means that each filter has height equal to 2 and width equal to 3. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *output* - - * **Description**: *output* is a number of output feature maps in the output. If *group* parameter value is greater than 1, *output* still matches the number of output features regardless of *group* value. For example, *output* equal to 1 means that there is one output feature map in a layer. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *group* - - * **Description**: *group* is the number of groups which *output* and *input* should be split into. For example, *group* equal to 1 means that all filters are applied to the whole input (usual convolution), *group* equal to 2 means that both *input* and *output* channels are separated into two groups and the *i-th output* group is connected to the *i-th input* group channel. *group* equal to a number of output feature maps implies depth-wise separable convolution. For more information, see the [Reference](https://medium.com/towards-data-science/types-of-convolutions-in-deep-learning-717013397f4d#6f51). - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *dilations* - - * **Description**: *dilations* is a distance in width and height between elements (weights) in the filter. 
For example, *dilations* equal to "1,1" means that all elements in the filter are neighbors, so it is the same as the usual convolution. *dilations* equal to "2,2" means that all elements in the filter are matched to the elements in the input matrix separated by one pixel. - * **Range of values**: a non-negative integer - * **Type**: `int[]` - * **Default value**: a list of 1 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *auto_pad* - - * **Description**: *auto_pad* defines how the padding is calculated. Possible values: - * Not specified: use explicit padding values - * *same_upper/same_lower*: add paddings to the input to match the output size. In case of odd padding value, an extra padding is added to the beginning if `auto_pad="same_upper"` or to the end if `auto_pad="same_lower"`. - * *valid*: do not use padding - * **Type**: `string` - * **Default value**: None - * **Required**: *no* - -**Inputs**: - -* **1**: 4D or 5D input blob. Required. - -**Weights Layout** - -Weights layout is GOIYX (GOIZYX for 3D convolution), which means that *X* changes the fastest, then *Y*, *Input* and *Output*, *Group*. - - -**Mathematical Formulation** - -* For the convolutional layer, the number of output features in each dimension is calculated as: -\f[ -n_{out} = \left ( \frac{n_{in} + 2p - k}{s} \right ) + 1 -\f] -* The receptive field in each layer is calculated as: - * Jump in the output feature map: - \f[ - j_{out} = j_{in} * s - \f] - * Size of the receptive field of output feature: - \f[ - r_{out} = r_{in} + ( k - 1 ) * j_{in} - \f] - * Center position of the receptive field of the first output feature: - \f[ - start_{out} = start_{in} + ( \frac{k - 1}{2} - p ) * j_{in} - \f] - * Output is calculated as: - \f[ - out = \sum_{i = 0}^{n}w_{i}x_{i} + b - \f] - -**Example** - -```xml - - - ... - ... 
- - - -``` - -* * * - -## Crop (Type 1) Layer -Back to top - -**Name**: *Crop* - -**Category**: *Layer* - -**Short description**: *Crop* layer changes selected dimensions of the input blob according to the specified parameters. - -**Parameters**: *Crop* layer parameters are specified in the `data` section, which is a child of the `layer` node. *Crop* **Type 1** layer takes two input blobs, and the shape of the second blob specifies the *Crop* size. The *Crop* layer of this type supports shape inference. - -* **Parameter name**: *axis* - - * **Description**: *axis* is the number of a dimension to crop. For example, *axis* equal to [1] means that the first dimension is cropped. - * **Range of values**: a list of unique integers, where each element is greater than or equal to 0 and less than input shape length - * **Type**: `int[]` - * **Default value**: `[1]` - * **Required**: *yes* - -* **Parameter name**: *offset* - - * **Description**: *offset* is the starting point for crop in the input blob. For example, *offset* equal to 2 means that crop starts from the second value of a specified axis. - * **Range of values**: a list of integers of the length equal to the length of the *axis* attribute. In the list, *offset[i]* is greater than or equal to 0 and less than or equal to *input_shape[axis[i]] - crop_size[axis[i]]*, where *crop_size* is the shape of the second input. - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -**Inputs** - -* **1**: Multidimensional input blob -* **2**: Shape of this input will be used for crop - -**Example** - -```xml - - - - - 1 - 21 - 44 - 44 - - - 1 - 21 - 34 - 34 - - - - - 1 - 21 - 34 - 34 - - - -``` - -* * * - -## Crop (Type 2) Layer -Back to top - -**Name**: *Crop* - -**Category**: *Layer* - -**Short description**: *Crop* layer changes selected dimensions of the input blob according to the specified parameters. 
- -**Parameters**: Specify parameters for the *Crop* layer in the `data` section, which is a child of the `layer` node. *Crop* **Type 2** layer takes one input blob to crop. The *Crop* layer of this type supports shape inference only when shape propagation is applied to dimensions not specified in the *axis* attribute. - -* **Parameter name**: *axis* - - * **Description**: *axis* is the number of a dimension to crop. For example, *axis* equal to [1] means that the first dimension is cropped. - * **Range of values**: a list of unique integers, where each element is greater than or equal to 0 and less than input shape length - * **Type**: `int[]` - * **Default value**: `[1]` - * **Required**: *yes* - -* **Parameter name**: *offset* - - * **Description**: *offset* is the starting point for crop in the input blob. For example, *offset* equal to 2 means that cropping starts from the second value of the specified axis. - * **Range of values**: a list of integers with the length equal to the length of *axis* attribute, where *offset[i]* is greater than or equal to 0 and less or equal to *input_shape[axis[i]] - dim[i]* - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *dim* - - * **Description**: *dim* is the resulting size of the output blob for the specified axis. For example, *dim* equal to [88] means that the output blob gets the dimension equal to 88 for the specified axis. - * **Range of values**: a list of integers - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -**Example** - -```xml - - - - - 1 - 21 - 44 - 44 - - - - - 1 - 21 - 34 - 34 - - - -``` - -* * * - -## Crop (Type 3) Layer -Back to top - -**Name**: *Crop* - -**Category**: *Layer* - -**Short description**: *Crop* layer changes selected dimensions of the input blob according to the specified parameters. - -**Parameters**: *Crop* layer parameters are specified in the `data` section, which is a child of the `layer` node. 
*Crop* **Type 3** layer takes one input blob to crop. The *Crop* layer of this type supports shape inference. - -* **Parameter name**: *axis* - - * **Description**: *axis* is the number of a dimension to crop. For example, *axis* equal to [1] means that the first dimension is cropped. - * **Range of values**: a list of unique integers, where each element is greater than or equal to 0 and less than input shape length - * **Type**: `int[]` - * **Default value**: `[1]` - * **Required**: *yes* - -* **Parameter name**: *crop_begin* - - * **Description**: *crop_begin* specifies the starting offset for crop in the input blob for a specified axes. - * **Range of values**: a list of integers, where *crop_begin[i]* is greater than or equal to 0 and less than *input_shape[axis[i]] - crop_end[i]* - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *crop_end* - - * **Description**: *crop_end* specifies the ending offset for crop in the input blob for the specified axes. - * **Range of values**: a list of integers, where *crop_end[i]* is greater than or equal to 0 and less than *input_shape[axis[i]] - crop_begin[i]* - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -**Example** - -```xml - - - - - 1 - 21 - 44 - 44 - - - - - 1 - 21 - 34 - 34 - - - -``` - -* * * - -## CTCGreedyDecoder Layer -Back to top - -**Name**: *CTCGreedyDecoder* - -**Category**: *Layer* - -**Short description**: *CTCGreedyDecoder* performs greedy decoding on the logits given in input (best path). - -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/python/tf/nn/ctc_greedy_decoder) - -**Parameters**: *CTCGreedyDecoder* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *ctc_merge_repeated* - - * **Description**: *ctc_merge_repeated* is a flag for merging repeated labels during the CTC calculation. 
- * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Mathematical Formulation** - -Given an input sequence \f$X\f$ of length \f$T\f$, *CTCGreadyDecoder* assumes the probability of a length \f$T\f$ character sequence \f$C\f$ is given by -\f[ -p(C|X) = \prod_{t=1}^{T} p(c_{t}|X) -\f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Deconvolution Layer -Back to top - -**Name**: *Deconvolution* - -**Category**: *Layer* - -**Short description**: *Deconvolution* layer is applied for upsampling the output to the higher image resolution. - -**Detailed description**: [Reference](https://distill.pub/2016/deconv-checkerboard/) - -**Parameters**: *Deconvolution* layer parameters should be specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *strides* - - * **Description**: *strides* is a distance (in pixels) to slide the filter on the feature map over the `(z, y, x)` axes for 3D deconvolutions and `(y, x)` axes for 2D deconvolutions. For example, *strides* equal to "4,2,1" means sliding the filter four pixels at a time over depth dimension, two pixels over height dimension, and one pixel over width dimension. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 1 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *pads_begin* - - * **Description**: *pads_begin* is the number of pixels to add to the beginning of each axis. For example, *pads_begin* equal to "1,2" means adding one pixel to the top of the input and two pixels to the left of the input. 
- * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 0 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *pads_end* - - * **Description**: *pads_end* is the number of pixels to add to the end of each axis. For example, *pads_end* equal to "1,2" means adding one pixel to the bottom of the input and two pixels to the right of the input. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 1 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *kernel* - - * **Description**: *kernel* is a size of each filter. For example, *kernel* equal to "2,3" means that each filter has height equal to 2 and width equal to 3. - * **Range of values**: a list of positive integers - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *output* - - * **Description**: *output* is the number of output feature maps in the output. If *group* parameter value is greater than 1, *output* still matches the number of output features regardless of the *group* value. For example, *output* equal to 1 means that there is one output feature map in a layer. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *group* - - * **Description**: *group* denotes the number of groups to which *output* and *input* should be split. For example, *group* equal to 1 means that all filters are applied to the whole input (usual convolution), *group* equal to 2 means that both *input* and *output* channels are separated into 2 groups and *i-th output* group is connected to *i-th input* group channels. *group* equal to a number of output feature maps implies depth-wise separable convolution. 
For more information, see the [Reference](https://medium.com/towards-data-science/types-of-convolutions-in-deep-learning-717013397f4d#6f51). - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *dilations* - - * **Description**: *dilations* is the distance in width and height between elements (weights) in the filter. For example, *dilation* equal to "1,1" means that all elements in the filter are neighbors, so it is the same as the usual convolution. *dilation* equal to "2,2" means that all elements in the filter are matched to the elements in the input matrix separated by one pixel. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 1 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *auto_pad* - - * **Description**: *auto_pad* defines how the padding is calculated. - * **Range of values**: - * Not specified: use explicit padding values. - * *same_upper/same_lower*: add paddings to the input to match the output size. In case of odd padding value, an extra padding is added to the beginning if `auto_pad="same_upper"` or to the end if `auto_pad="same_lower"`. - * *valid*: do not use padding - * **Type**: string - * **Default value**: None - * **Required**: *no* - -**Inputs**: - -* **1**: 4D or 5D blob with input data. Required. - -**Weights Layout** - -Weights layout is GOIYX, which means that *X* changes the fastest, then *Y*, *Input* and *Output*, *Group*. - - -**Mathematical Formulation** - -*Deconvolution* is also called transpose convolution and performs operation that is reverse to convolution. -The number of output features for each dimensions is calculated as: -\f[S_{o}=stride(S_{i} - 1 ) + S_{f} - 2pad \f] -Where \f$S\f$ is the size of output, input, and filter. 
-Output is calculated in the same way as for convolution layer: -\f[out = \sum_{i = 0}^{n}w_{i}x_{i} + b\f] - -**Example** - -```xml - - - - - 1 - 512 - 8 - 8 - 8 - - - - - 1 - 512 - 16 - 16 - 16 - - - - - - - -``` - -* * * - -## DeformableConvolution Layer -Back to top - -**Name**: *DeformableConvolution* - -**Category**: *Layer* - -**Short description**: *DeformableConvolution* convolution layer enhances the transformation modeling capacity of CNNs. - -**Detailed description**: [Reference](https://arxiv.org/abs/1703.06211) - -**Parameters**: *DeformableConvolution* layer parameters are specified in the `data` node, which is a child of the `layer` node. The layer has the same parameters as a regular *Convolution* layer and several unique parameters. - -* **Parameter name**: *num_deformable_group* - - * **Description**: *num_deformable_group* is the number of deformable group partitions. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -**Inputs**: - -* **1**: 4D or 5D blob with input data. Required. -* **2**: Input offset to the DeformableConvolution - -**Weights Layout** - -Weights layout is GOIYX (GOIZYX for 3D convolution), which means that *X* changes the fastest, then *Y*, *Input* and *Output*, *Group*. - -**Example** - -```xml - - - - - 1 - 512 - 40 - 27 - - - 1 - 72 - 40 - 27 - - - - - 1 - 512 - 40 - 27 - - - - - - -``` - -* * * - -## DepthToSpace Layer -Back to top - -**Name**: *DepthToSpace* - -**Category**: *Layer* - -**Short description**: *DepthToSpace* layer rearranges data from the depth dimension of the input blob into spatial dimensions. - -**Detailed description**: *DepthToSpace* layer outputs a copy of the input blob, where values from the depth dimension (features) are moved to spatial blocks. Refer to the [ONNX* specification](https://github.com/onnx/onnx/blob/master/docs/Operators.md#DepthToSpace) for an example of the 4D input blob case. 
- -**Parameters**: *DepthToSpace* layer parameters are specified parameters in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *block_size* - - * **Description**: *block_size* specifies the size of the value block to be moved. The depth dimension size must be evenly divided by `block_size ^ (len(input.shape) - 2)`. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -**Inputs**: - -* **1**: 3D+ blob with input data. Required. - -**Mathematical Formulation** - -The operation is equivalent to the following transformation of the input blob *x* with *K* spatial dimensions of shape *[N, C, D1, D2, D3 , ... , DK]*: - -``` -x' = reshape(x, [N, block_size, block_size, ... , block_size, D1 * block_size, D2 * block_size, ... Dk * block_size]) -x'' = transpose(x', [0, K + 1, K + 2, 1, K + 3, 2, K + 4, 3, ... K + K + 1, K]) -y = reshape(x'', [N, C / block_size ^ K, D1 * block_size, D2 * block_size, D3 * block_size, ... , DK * block_size]) - -``` -**Example** - -```xml - - - - - 5 - 4 - 2 - 3 - - - - - 5 - 1 - 4 - 6 - - - -``` - -* * * - -## DetectionOutput Layer -Back to top - -**Name**: *DetectionOutput* - -**Category**: *Layer* - -**Short description**: *DetectionOutput* layer performs non-maximum suppression to generate the detection output using information on location and confidence predictions. - -**Detailed description**: [Reference](https://arxiv.org/pdf/1512.02325.pdf). The layer has three required inputs: blob with box logits, blob with confidence predictions, and blob with box coordinates (proposals). It can have two additional inputs with additional confidence predictions and box coordinates described in the [article](https://arxiv.org/pdf/1711.06897.pdf). The five input version of the layer is supported with MYRIAD plugin only. 
The output blob contains information about filtered detections described with seven element tuples: *[batch_id, class_id, confidence, x_1, y_1, x_2, y_2]*. The first tuple with *batch_id* equal to -1 means end of output. - -**Parameters**: *DetectionOutput* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *num_classes* - - * **Description**: *num_classes* is the number of classes to be predicted. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *background_label_id* - - * **Description**: *background_label_id* is the background label ID. If there is no background class, set it to -1. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *top_k* - - * **Description**: *top_k* is the maximum number of results to keep per batch after NMS step. -1 means keeping all bounding boxes. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: -1 - * **Required**: *no* - -* **Parameter name**: *variance_encoded_in_target* - - * **Description**: *variance_encoded_in_target* is a flag that specifies if variance is encoded in target. If flag is 0 (that is, `false`), you need to adjust the predicted offset accordingly. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *keep_top_k* - - * **Description**: *keep_top_k* is the maximum number of bounding boxes per batch to keep after NMS step. -1 means keeping all bounding boxes. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: -1 - * **Required**: *yes* - -* **Parameter name**: *code_type* - - * **Description**: *code_type* is a coding method for bounding boxes. 
- * **Range of values**: `"caffe.PriorBoxParameter.CENTER_SIZE"`, `"caffe.PriorBoxParameter.CORNER"` - * **Type**: `string` - * **Default value**: `caffe.PriorBoxParameter.CORNER` - * **Required**: *no* - -* **Parameter name**: *share_location* - - * **Description**: *share_location* is a flag that specifies if bounding boxes are shared among different classes. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *nms_threshold* - - * **Description**: *nms_threshold* is the threshold to be used in the NMS stage. - * **Range of values**: a floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *confidence_threshold* - - * **Description**: *confidence_threshold* is a threshold to filter out detections with smaller confidence. If not set, all boxes are used. - * **Range of values**: a floating-point number - * **Type**: `float` - * **Default value**: `-FLT_MAX` - * **Required**: *no* - -* **Parameter name**: *clip_after_nms* - - * **Description**: *clip_after_nms* is a flag that specifies whether to perform clip bounding boxes after non-maximum suppression or not. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *clip_before_nms* - - * **Description**: *clip_before_nms* is a flag that specifies whether to clip bounding boxes before non-maximum suppression or not. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *decrease_label_id* - - * **Description**: *decrease_label_id* is a flag that denotes how to perform NMS. 
- * **Range of values**: - * *0* - perform NMS like in Caffe\* - * *1* - perform NMS like in MxNet\* - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *normalized* - - * **Description**: *normalized* is a flag that specifies whether input blobs with boxes are normalized. If blobs are not normalized, the *input_height* and *input_width* parameters are used to normalize box coordinates. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *input_height* - - * **Description**: *input_height* is the height of an input image. If the *normalized* is 1, *input_height* is ignored. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *input_width* - - * **Description**: *input_width* is the width of an input image. If the *normalized* is 1, *input_width* is ignored. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *objectness_score* - - * **Description**: *objectness_score* is the threshold to sort out confidence predictions. Used only when the *DetectionOutput* layer has five inputs. - * **Range of values**: a non-negative floating-point number - * **Type**: `float` - * **Default value**: 0.0 - * **Required**: *no* - -**Inputs**: - -* **1**: 2D input blob with box logits. Required. -* **2**: 2D input blob with class predictions. Required. -* **3**: 3D input blob with proposals. Required. -* **4**: 2D input blob with additional class predictions information described in the [article](https://arxiv.org/pdf/1711.06897.pdf). Optional. -* **5**: 2D input blob with additional box predictions information described in the [article](https://arxiv.org/pdf/1711.06897.pdf). Optional. 
- -**Mathematical Formulation** - -At each feature map cell, *DetectionOutput* predicts the offsets relative to the default box shapes in the cell, as well as the per-class scores that indicate the presence of a class instance in each of those boxes. Specifically, for each box out of k at a given location, *DetectionOutput* computes class scores and the four offsets relative to the original default box shape. This results are a total of \f$(c + 4)k\f$ filters that are applied around each location in the feature map, yielding \f$(c + 4)kmn\f$ outputs for a *m \* n* feature map. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - - -## Erf Layer -Back to top - -**Name**: *Erf* - -**Category**: *Layer* - -**Short description**: *Erf* layer computes the Gauss error function of input element-wise. - -**Detailed Description**: [Reference](https://www.tensorflow.org/api_docs/python/tf/math/erf) - -**Parameters**: *Erf* layer does not have parameters. - -**Inputs**: - -* **1**: Input tensor X of any floating-point type. Required. - -**Outputs**: - -* **1**: Result of Erf function applied on input tensor x. Floating point tensor with shape and type matching input tensor. Required. - -**Mathematical Formulation** - -For each element from an input tensor, *Erf* layer calculates corresponding -element in the output tensor by the formula: -\f[ -erf(x) = \frac{2}{\sqrt{\pi}} \int_0^x e^{-t^2} dt -\f] - -**Example** - -```xml - - - - 5 - 4 - - - - - 5 - 4 - - - -``` - - - -* * * - - -## Eltwise Layer -Back to top - -**Name**: *Eltwise* - -**Category**: *Layer* - -**Short description**: *Eltwise* layer performs element-wise operation specified in parameters, over given inputs. - -**Parameters**: *Eltwise* layer parameters are specified in the `data` node, which is a child of the `layer` node. *Eltwise* accepts two inputs of arbitrary number of dimensions. 
The operation supports broadcasting input blobs according to the [NumPy specification](https://docs.scipy.org/doc/numpy/user/basics.broadcasting.html). - -* **Parameter name**: *operation* - - * **Description**: *operation* is a mathematical operation to be performed over inputs. - * **Range of values**: - * *sum* - summation - * *sub* - subtraction - * *mul* - multiplication - * *div* - division - * *max* - maximum - * *min* - minimum - * *squared_diff* - squared difference - * *floor_mod* - reminder of division - * *pow* - power - * *logical_and* - logical AND - * *logical_or* - logical OR - * *logical_xor* - logical XOR - * *less* - less - * *less_equal* - less or equal - * *greater* - greater - * *greater_equal* - greater equal - * *equal* - equal - * *not_equal* - not equal - * **Type**: string - * **Default value**: *sum* - * **Required**: *no* - -**Inputs** - -* **1**: Multidimensional input blob. Required. -* **2**: Multidimensional input blob. Required. - -**Mathematical Formulation** -*Eltwise* does the following with the input blobs: -\f[ -o_{i} = f(b_{i}^{1}, b_{i}^{2}) -\f] -where \f$b_{i}^{1}\f$ - first blob \f$i\f$-th element, \f$b_{i}^{2}\f$ - second blob \f$i\f$-th element, \f$o_{i}\f$ - output blob \f$i\f$-th element, \f$f(a, b)\f$ - is a function that performs an operation over its two arguments \f$a, b\f$. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Fill Layer -Back to top - -**Name**: *Fill* - -**Category**: *Layer* - -**Short description**: *Fill* layer generates a blob of the specified shape filled with the specified value. - -**Parameters**: *Fill* layer does not have parameters. - -**Inputs**: - -* **1**: 1D blob with an output blob shape. Required. - -* **2**: 0D blob (constant) with the value for fill. Required. 
- -**Example** - -```xml - - - - 2 - - - - - - 3 - 4 - - - -``` - -* * * - -## Flatten Layer -Back to top - -**Name**: *Flatten* - -**Category**: *Layer* - -**Short description**: *Flatten* layer performs flattening of specific dimensions of the input blob. - -**Parameters**: *Flatten* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* specifies the first axis to flatten. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *end_axis* - - * **Description**: *end_axis* specifies the last dimension to flatten. The value can be negative meaning counting axes from the end. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: -1 - * **Required**: *no* - -**Inputs** - -* **1**: Multidimensional input blob. Required. - -**Example** - -```xml - - - - - 7 - 19 - 19 - 12 - - - - - 7 - 4332 - - - -``` - -* * * - -## FullyConnected Layer -Back to top - -**Name**: *FullyConnected* - -**Category**: *Layer* - -**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/innerproduct.html) - -**Detailed description**: [Reference](http://cs231n.github.io/convolutional-networks/#fc) - -**Parameters**: *FullyConnected* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *out-size* - - * **Description**: *out-size* is the length of the output vector. For example, *out-size* equal to 4096 means that the output vector length is 4096. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Inputs** - -* **1**: 2D or 4D input blob. Required. - -**Weights Layout** - -OI, which means that Input changes the fastest, then Output. 
- -**Mathematical Formulation** - -* If previous layer is *FullyConnected*: - \f[ - y_{i} = f( z_{i} ) \quad with \quad z_{i} = \sum_{j=1}^{m_{1}^{( l-1 )}}w_{i,j}^{( l )}y_{i}^{ ( l -1 )} - \f] -* Otherwise: - \f[ - y_{i} = f( z_{i} ) \quad with \quad z_{i}^{ ( l )} = \sum_{j=1}^{m_{1}^{( l-1 )}}\sum_{r=1}^{m_{2}^{ ( l-1 )}}\sum_{s=1}^{m_{3}^{ ( l-1 )}}w_{i,j,r,s}^{ ( l )} ( Y_{i}^{ (l-1) })_{r,s} - \f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Gather Layer -Back to top - -**Name**: *Gather* - -**Category**: *Layer* - -**Short description**: *Gather* layer takes slices of data in the second input blob according to the indexes specified in the first input blob. The output blob shape is `input2.shape[:axis] + input1.shape + input2.shape[axis + 1:]`. - -**Parameters**: *Gather* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* is a dimension index to gather data from. For example, *axis* equal to 1 means that gathering is performed over the first dimension. - * **Range of values**: an integer in the range `[-len(input2.shape), len(input2.shape) - 1]`. - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Mathematical Formulation** - -\f[ - output[:, ... ,:, i, ... , j,:, ... ,:] = input2[:, ... ,:, input1[i, ... ,j],:, ... ,:] -\f] - - -**Inputs** - -* **1**: Multidimensional input blob with indexes to gather. The values for indexes are in the range `[0, input1[axis] - 1]`. -* **2**: Multidimensional input blob with arbitrary data. - -**Example** - -```xml - - - - - 15 - 4 - 20 - 28 - - - 6 - 12 - 10 - 24 - - - - - 6 - 15 - 4 - 20 - 28 - 10 - 24 - - - -``` - -* * * - -## GRN Layer -Back to top - -**Name**: *GRN* - -**Category**: *Normalization* - -**Short description**: *GRN* is the Global Response Normalization with L2 norm (across channels only). 
- -**Parameters**: *GRN* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *bias* - - * **Description**: *bias* is added to the variance. - * **Range of values**: a floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -**Inputs** - -* **1**: 2D, 3D or 4D input blob. Required. - -**Mathematical Formulation** - -*GRN* computes the L2 norm by channels for input blob. *GRN* generally does the following with the input blob: -\f[ -output_{i} = \frac{input_{i}}{\sqrt{\sum_{i}^{C} input_{i}}} -\f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * -## GRUCell Layer -Back to top - -**Name**: *GRUCell* - -**Category**: *Layer* - -**Short description**: *GRUCell* layer computes the output using the formula described in the [paper](https://arxiv.org/abs/1406.1078). - -**Parameters**: *GRUCell* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *hidden_size* - - * **Description**: *hidden_size* specifies hidden state size. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *activations* - - * **Description**: *activations* specifies activation functions for gates. - * **Range of values**: any combination of *relu*, *sigmoid*, *tanh* - * **Type**: a list of strings - * **Default value**: *sigmoid,tanh* - * **Required**: *no* - -* **Parameter name**: *activations_alpha, activations_beta* - - * **Description**: *activations_alpha, activations_beta* parameters of functions - * **Range of values**: a list of floating-point numbers - * **Type**: `float[]` - * **Default value**: None - * **Required**: *no* - -* **Parameter name**: *clip* - - * **Description**: *clip* specifies bound values *[-C, C]* for tensor clipping. Clipping is performed before activations. 
- * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *no* - -* **Parameter name**: *linear_before_reset* - - * **Description**: *linear_before_reset* flag denotes if the layer behaves according to the modification of *GRUCell* described in the formula in the [ONNX documentation](https://github.com/onnx/onnx/blob/master/docs/Operators.md#GRU). - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -**Inputs** - -* **1**: `X` - 2D ([batch_size, input_size]) input data. Required. - -* **2**: `Hi` - 2D ([batch_size, hidden_size]) input hidden state data. Required. - -**Outputs** - -* **1**: `Ho` - 2D ([batch_size, hidden_size]) output hidden state. - -**Example** -```xml - - - - - 1 - 16 - - - 1 - 128 - - - - - 1 - 128 - - - - - - - -``` - -* * * -## Input Layer -Back to top - -**Name**: *Input* - -**Category**: *Layer* - -**Short description**: *Input* layer specifies input to the model. - -**Parameters**: *Input* layer does not have parameters. - -**Example** - -```xml - - - - 1 - 3 - 224 - 224 - - - -``` - -* * * - -## Interp Layer -Back to top - -**Name**: *Interp* - -**Category**: *Layer* - -**Short description**: *Interp* layer performs bilinear interpolation of the input blob by the specified parameters. - -**Parameters**: *Interp* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *height* - - * **Description**: *height* specifies output height. If the parameter is not set, other parameters are used for output size calculation. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *no* - -* **Parameter name**: *width* - - * **Description**: *width* specifies output width. If the parameter is not set, other parameters are used for output size calculation. 
- * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *no* - -* **Parameter name**: *align_corners* - - * **Description**: *align_corners* is a flag that specifies whether to align corners or not. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *pad_beg* - - * **Description**: *pad_beg* specify the number of pixels to add to the beginning of the image being interpolated. - * **Range of values**: a non-negative integer number - * **Type**: `int` - * **Default value**: 0 - * **Required**: *yes* - -* **Parameter name**: *pad_end* - - * **Description**: *pad_end* specify the number of pixels to add to the end of the image being interpolated. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: 0 - * **Required**: *yes* - -**Inputs** - -* **1**: 4D input blob. Required. - -**Example** - -```xml - - - - - 1 - 2 - 48 - 80 - - - - - 1 - 2 - 96 - 160 - - - -``` - -* * * - -## LSTMCell Layer -Back to top - -**Name**: *LSTMCell* - -**Category**: *Layer* - -**Short description**: *LSTMCell* layer computes the output using the formula described in the original paper [Long Short-Term Memory](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.676.4320&rep=rep1&type=pdf). - -**Parameters**: *LSTMCell* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *hidden_size* - - * **Description**: *hidden_size* specifies hidden state size. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *activations* - - * **Description**: *activations* specifies activation functions for gates. 
- * **Range of values**: any combination of *relu*, *sigmoid*, *tanh* - * **Type**: a list of strings - * **Default value**: *sigmoid,tanh,tanh* - * **Required**: *no* - -* **Parameter name**: *activations_alpha, activations_beta* - - * **Description**: *activations_alpha, activations_beta* parameters of functions. - * **Range of values**: a list of floating-point numbers - * **Type**: `float[]` - * **Default value**: None - * **Required**: *no* - -* **Parameter name**: *clip* - - * **Description**: *clip* specifies bound values *[-C, C]* for tensor clipping. Clipping is performed before activations. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *no* - -**Inputs** - -* **1**: `X` - 2D ([batch_size, input_size]) input data. Required. - -* **2**: `Hi` - 2D ([batch_size, hidden_size]) input hidden state data. Required. - -* **3**: `Ci` - 2D ([batch_size, hidden_size]) input cell state data. Required. - - -**Outputs** - -* **1**: `Ho` - 2D ([batch_size, hidden_size]) output hidden state. - -* **2**: `Co` - 2D ([batch_size, hidden_size]) output cell state. - -**Mathematical Formulation** - -``` -Formula: - * - matrix mult - (.) - eltwise mult - [,] - concatenation -sigm - 1/(1 + e^{-x}) -tanh - (e^{2x} - 1)/(e^{2x} + 1) - f = sigm(Wf*[Hi, X] + Bf) - i = sigm(Wi*[Hi, X] + Bi) - c = tanh(Wc*[Hi, X] + Bc) - o = sigm(Wo*[Hi, X] + Bo) - Co = f (.) Ci + i (.) c - Ho = o (.) tanh(Co) -``` - -**Example** - -```xml - - ... - ... - -``` - -* * * - -## Memory Layer -Back to top - -**Name**: *Memory* - -**Category**: *Layer* - -**Short description**: *Memory* layer represents the delay layer in terms of LSTM terminology. For more information about LSTM topologies, please refer to this [article](http://colah.github.io/posts/2015-08-Understanding-LSTMs). - -**Detailed description**: *Memory* layer saves the state between two infer requests. 
In the topology, it is the single layer, however, in the Intermediate Representation, it is always represented as a pair of **Memory** layers. One of these layers does not have outputs and another does not have inputs (in terms of the Intermediate Representation). - -**Parameters**: *Memory* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *id* - - * **Description**: *id* is the ID of the pair of *Memory* layers. Two layers with the same value of the *id* parameter are paired. - * **Range of values**: any combination of Latin characters, numbers, and underscores (`_`) in the `string` format - * **Type**: string - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *index* - - * **Description**: *index* specifies whether the given layer is input or output. For example, *index* equal to 0 means the layer is output. - * **Range of values**: - * 0 - current layer is output - * 1 - current layer is input - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *size* - - * **Description**: *size* is the size of the group. For example, *size* equal to 2 means this group is a pair. - * **Range of values**: only *2* is supported - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Mathematical Formulation** - -*Memory* saves data from the input blob. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## MVN Layer -Back to top - -**Name**: *MVN* - -**Category**: *Normalization* - -**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/mvn.html) - -**Parameters**: *MVN* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *across_channels* - - * **Description**: *across_channels* is a flag that specifies whether mean values are shared across channels. 
For example, *across_channels* equal to 0 means that mean values are not shared across channels. - * **Range of values**: - * 0 - do not share mean values across channels - * 1 - share mean values across channels - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *normalize_variance* - - * **Description**: *normalize_variance* is a flag that specifies whether to perform variance normalization. - * **Range of values**: - * 0 - do not normalize variance - * 1 - normalize variance - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *eps* - - * **Description**: *eps* is the number to be added to the variance to avoid division by zero when normalizing the value. For example, *epsilon* equal to 0.001 means that 0.001 is added to the variance. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -**Inputs** - -* **1**: 4D or 5D input blob. Required. - -**Mathematical Formulation** - -*MVN* subtracts mean value from the input blob: -\f[ -o_{i} = i_{i} - \frac{\sum{i_{k}}}{C * H * W} -\f] -If *normalize_variance* is set to 1, the output blob is divided by variance: -\f[ -o_{i}=\frac{o_{i}}{\sum \sqrt {o_{k}^2}+\epsilon} -\f] - -**Example** - -```xml - - - - ... - - - ... - - -``` - -* * * - -## NonMaxSuppression Layer -Back to top - -**Name**: *NonMaxSuppression* - -**Category**: *Layer* - -**Short description**: *NonMaxSuppression* performs non-maximum suppression of the input boxes and return indices of the selected boxes. - -**Detailed description**: [Reference](https://github.com/onnx/onnx/blob/rel-1.5.0/docs/Operators.md#NonMaxSuppression) - -**Parameters**: *NonMaxSuppression* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *center_point_box* - - * **Description**: *center_point_box* is flag that specifies the format of the box data. 
- * **Range of values**: - * false (0) - the box data is supplied as `[y1, x1, y2, x2]` where `(y1, x1)` and `(y2, x2)` are the coordinates of any diagonal pair of box corners. - * true (1) - the box data is supplied as `[x_center, y_center, width, height]`. - * **Type**: `bool` - * **Default value**: false - * **Required**: *no* - - -**Inputs** - -* **1**: 3D floating point blob with the boxes data of shape [batch_size, num_boxes, 4]. Required. -* **2**: 3D floating point blob with the boxes scores of shape [batch_size, num_classes, num_boxes]. Required. -* **3**: 1D integer blob with of shape [1] representing maximum number of boxes to be selected per class. Optional. If not specified then all boxes will be selected. -* **4**: 1D floating point blob with of shape [1] representing intersection over union threshold. Optional. If not specified then it is equal to 1.0. -* **5**: 1D floating point blob with of shape [1] representing box score threshold. Optional. If not specified then it is equal to 0.0. - - -**Mathematical Formulation** - -\f[o_{i} = \left( 1 + \left( \frac{\alpha}{n} \right)\sum_{i}x_{i}^{2} \right)^{\beta}\f] -Where \f$n\f$ is the size of each local region. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Norm Layer -Back to top - -**Name**: *Norm* - -**Category**: *Normalization* - -**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/lrn.html) - -**Detailed description**: [Reference](http://yeephycho.github.io/2016/08/03/Normalizations-in-neural-networks/#Local-Response-Normalization-LRN) - -**Parameters**: *Norm* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *alpha* - - * **Description**: *alpha* is a scaling parameter for the normalizing sum. For example, *alpha* equal to 0.0001 means that the normalizing sum is multiplied by 0.0001. 
- * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *beta* - - * **Description**: *beta* is an exponent for the normalizing sum. For example, *beta* equal to 0.75 means that the normalizing sum is raised to the power of 0.75. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *region* - - * **Description**: *region* is the strategy of local regions extension. For example, *region* equal to *across* means that the normalizing sum is performed over adjacent channels. - * **Range of values**: - * *across* - normalize sum over adjacent channels - * *same* - normalize sum over nearby spatial locations - * **Type**: string - * **Default value**: `across` - * **Required**: *yes* - -* **Parameter name**: *local-size* - - * **Description**: *local-size* represents the side length of the region to be used for the normalization sum or number of channels depending on the strategy specified in the *region* parameter. For example, *local-size* equal to 5 for the *across* strategy means application of sum across 5 adjacent channels. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Inputs** - -* **1**: 4D input blob. Required. - -**Mathematical Formulation** - -\f[o_{i} = \left( 1 + \left( \frac{\alpha}{n} \right)\sum_{i}x_{i}^{2} \right)^{\beta}\f] -Where \f$n\f$ is the size of each local region. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Normalize Layer -Back to top - -**Name**: *Normalize* - -**Category**: *Normalization* - -**Short description**: *Normalize* layer performs l-p normalization of 1 of input blob. - -**Parameters**: *Normalize* layer parameters should be specified as the `data` node, which is a child of the `layer` node. 
- -* **Parameter name**: *across_spatial* - - * **Description**: *across_spatial* is a flag that specifies if normalization is performed over CHW or HW. For example, *across_spatial* equal to 0 means that normalization is not shared across channels. - * **Range of values**: - * 0 - do not share normalization across channels - * 1 - not supported - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *channel_shared* - - * **Description**: *channel_shared* is a flag that specifies if scale parameters are shared across channels. For example, *channel_shared* equal to 0 means that scale parameters are not shared across channels. - * **Range of values**: - * 0 - do not share scale parameters across channels - * 1 - not supported - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *eps* - - * **Description**: *eps* is the number to be added to the variance to avoid division by zero when normalizing the value. For example, *eps* equal to 0.001 means that 0.001 is used if all the values in normalization are equal to zero. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -**Inputs** - -* **1**: 2D, 3D or 4D input blob. Required. - -**Mathematical Formulation** - -\f[ -o_{i} = \sum_{i}^{H*W}\frac{\left ( n*C*H*W \right )` scale}{\sqrt{\sum_{i=0}^{C*H*W}\left ( n*C*H*W \right )^{2}}} -\f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## OneHot Layer -Back to top - -**Name**: *OneHot* - -**Category**: *Layer* - -**Short description**: *OneHot* layer fills the locations represented by indices specified in input with the value of *on_value* and fills all other locations with the value of *off_value*. If an index is out of range, the corresponding element is also filled with the *off_value*. 
- -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/python/tf/one_hot) - -**Parameters**: *OneHot* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* is a new axis position in the output shape to fill with one-hot values. - * **Range of values**: an integer. Negative value means counting dimension from the end. - * **Type**: `int` - * **Default value**: -1 - * **Required**: *no* - -* **Parameter name**: *depth* - - * **Description**: *depth* is depth of a new one-hot dimension. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *on_value* - - * **Description**: *on_value* is the value that the locations represented by indices in input take. - * **Range of values**: a floating-point number. - * **Type**: `float` - * **Default value**: 1.0 - * **Required**: *no* - -* **Parameter name**: *off_value* - - * **Description**: *off_value* is the value that the locations not represented by indices in input take. - * **Range of values**: a floating-point number. - * **Type**: `float` - * **Default value**: 0.0 - * **Required**: *no* - -**Inputs**: - -* **1**: Multidimensional input tensor with indices of type T (can be 0D). Required. - -**Outputs**: - -* **1** Multidimensional output tensor. If the input indices have rank N, the output will have rank N+1. - A new axis of the size `depth` is created at the dimension `axis`. - -**Examples** - -```xml - - - - - 3 - - - - - 3 - 3 - - - -``` - -* * * - -## Pad Layer -Back to top - -**Name**: *Pad* - -**Category**: *Layer* - -**Short description**: *Pad* layer extends an input blob on edges. New element values are generated based on the *Pad* layer parameters described below. - -**Parameters**: *Pad* layer parameters are specified in the `data` section, which is a child of the `layer` node. 
The parameters specify a number of elements to add along each axis and a rule by which new element values are generated: for example, whether they are filled with a given constant or generated based on the input blob content. - -* **Parameter name**: *pads_begin* - - * **Description**: *pads_begin* specifies the number of padding elements at the beginning of each axis. - * **Range of values**: a list of non-negative integers. The length of the list must be equal to the number of dimensions in the input blob. - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *pads_end* - - * **Description**: *pads_end* specifies the number of padding elements at the end of each axis. - * **Range of values**: a list of non-negative integers. The length of the list must be equal to the number of dimensions in the input blob. - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *pad_mode* - - * **Description**: *pad_mode* specifies the method used to generate new element values. - * **Range of values**: Name of the method in string format: - * `constant` - padded values are equal to the value of the *pad_value* layer parameter. - * `edge` - padded values are copied from the respective edge of the input blob. - * `reflect` - padded values are a reflection of the input blob; values on the edges are not duplicated. `pads_begin[D]` and `pads_end[D]` must be not greater than `input.shape[D] – 1` for any valid `D`. - * `symmetric` - padded values are symmetrically added from the input blob. This method is similar to the `reflect`, but values on edges are duplicated. Refer to the examples below for more details. `pads_begin[D]` and `pads_end[D]` must be not greater than `input.shape[D]` for any valid `D`. - * **Type**: string - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *pad_value* - - * **Description**: Use with the `pad_mode = "constant"` only. 
All new elements are filled with this value. - * **Range of values**: a floating-point number - * **Type**: `float` - * **Default value**: 0.0 - * **Required**: *no* - -**Inputs** - -* **1**: Multidimensional input blob. Required. - - -**Outputs** - -* **1**: Multidimensional input blob with dimensions `pads_begin[D] + input.shape[D] + pads_end[D]` for each `D` from `0` to `len(input.shape) - 1`. - - -**pad_mode Examples** - -The following examples illustrate how output blob is generated for the *Pad* layer for a given input blob: -``` -INPUT = -[[ 1 2 3 4 ] -[ 5 6 7 8 ] -[ 9 10 11 12 ]] -``` -with the following parameters: -``` -pads_begin = [0, 1] -pads_end = [2, 3] -``` -depending on the *pad_mode*. -* `pad_mode = "constant"`: -``` -OUTPUT = -[[ 0 1 2 3 4 0 0 0 ] -[ 0 5 6 7 8 0 0 0 ] -[ 0 9 10 11 12 0 0 0 ] -[ 0 0 0 0 0 0 0 0 ] -[ 0 0 0 0 0 0 0 0 ]] -``` -* `pad_mode = "edge"`: -``` -OUTPUT = -[[ 1 1 2 3 4 4 4 4 ] -[ 5 5 6 7 8 8 8 8 ] -[ 9 9 10 11 12 12 12 12 ] -[ 9 9 10 11 12 12 12 12 ] -[ 9 9 10 11 12 12 12 12 ]] -``` -* `pad_mode = "reflect"`: -``` -OUTPUT = -[[ 2 1 2 3 4 3 2 1 ] -[ 6 5 6 7 8 7 6 5 ] -[ 10 9 10 11 12 11 10 9 ] -[ 6 5 6 7 8 7 6 5 ] -[ 2 1 2 3 4 3 2 1 ]] -``` -* `pad_mode = "symmetric"`: -``` -OUTPUT = -[[ 1 1 2 3 4 4 3 2 ] -[ 5 5 6 7 8 8 7 6 ] -[ 9 9 10 11 12 12 11 10 ] -[ 9 9 10 11 12 12 11 10 ] -[ 5 5 6 7 8 8 7 6 ]] -``` - -**Example** - -```xml - - - - - 1 - 3 - 32 - 40 - - - - - 2 - 8 - 37 - 48 - - - -``` - -* * * - -## Permute Layer -Back to top - -**Name**: *Permute* - -**Category**: *Layer* - -**Short description**: *Permute* layer reorders input blob dimensions. - -**Detailed description**: [Reference](http://caffe.help/manual/layers/tile.html) - -**Parameters**: *Permute* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *order* - - * **Description**: *order* is a list of dimensions indexes for output blob. 
For example, *order* equal to "0,2,3,1" means that the output blob has the following dimensions: the first dimension from the input blob, the third dimension from the input blob, the fourth dimension from the input blob, the second dimension from the input blob. - * **Range of values**: a list of positive integers separated by comma - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -**Mathematical Formulation** - -*Permute* layer reorders input blob dimensions. Source indexes and destination indexes are bound by the formula: -\f[ -src\_ind_{offset} = n * ordered[1] * ordered[2] * ordered[3] + (h * ordered[3] + w) -\f] -\f[ -n \in ( 0, order[0] ) -\f] -\f[ -h \in ( 0, order[2] ) -\f] -\f[ -w \in ( 0, order[3] ) -\f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Pooling Layer -Back to top - -**Name**: *Pooling* - -**Category**: *Pool* - -**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/pooling.html) - -**Detailed description**: [Reference](http://cs231n.github.io/convolutional-networks/#pool) - -**Parameters**: *Pooling* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *strides* - - * **Description**: *strides* is a distance (in pixels) to slide the window on the feature map over the `(z, y, x)` axes for 3D poolings and `(y, x)` axes for 2D poolings. For example, *strides* equal to "4,2,1" means sliding the window four pixels at a time over depth dimension, two pixels over height dimension, and one pixel over width dimension. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 1 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *pads_begin* - - * **Description**: *pads_begin* is the number of pixels to add to the beginning along each axis. 
For example, *pads_begin* equal to "1,2" means adding one pixel to the top of the input and two pixels to the left of the input. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 0 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *pads_end* - - * **Description**: *pads_end* is the number of pixels to add to the ending along each axis. For example, *pads_end* equal "1,2" means adding one pixel to the bottom of the input and two pixels to the right of the input. - * **Range of values**: a list of non-negative integers - * **Type**: `int[]` - * **Default value**: a list of 1 with length equal to the number of convolution kernel dimensions - * **Required**: *no* - -* **Parameter name**: *kernel* - - * **Description**: *kernel* is a size of each filter. For example, *kernel* equal to "2,3" means that each filter has height equal to 2 and width equal to 3. - * **Range of values**: a list of positive integers - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *pool-method* - - * **Description**: *pool-method* is a type of pooling strategy for values. - * **Range of values**: - * *max* - choose the biggest value in a feature map for each window position - * *avg* - take the average value in a feature map for each windows position - * **Type**: string - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *exclude-pad* - - * **Description**: *exclude-pad* is a flag that specifies whether to ignore zeros in a padding area. For example, *exclude-pad* equal to *true* means that zero values in the padding are not used. - * **Range of values**: *true* or *false* - * **Type**: string - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *rounding_type* - - * **Description**: *rounding_type* is a type of rounding to apply. 
- * **Range of values**: - * *ceil* - * *floor* - * **Type**: string - * **Default value**: *floor* - * **Required**: *no* - -* **Parameter name**: *auto_pad* - - * **Description**: *auto_pad* specifies how to calculate padding. - * **Range of values**: - * Not specified: use explicit padding values - * *same_upper/same_lower*: the input is padded to match the output size. In case of odd padding value, an extra padding is added at the end (at the beginning). - * *valid*: do not use padding - * **Type**: string - * **Default value**: None - * **Required**: *no* - -**Inputs**: - -* **1**: 4D or 5D input blob. Required. - -**Mathematical Formulation** - -* For `pool-method="max"`: - \f[ - output_{j} = MAX\{ x_{0}, ... x_{i}\} - \f] -* For `pool-method="avg"`: - \f[ - output_{j} = \frac{\sum_{i = 0}^{n}x_{i}}{n} - \f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Power Layer -Back to top - -**Name**: *Power* - -**Category**: *Layer* - -**Short description**: *Power* layer computes the output as `(shift + scale * x) ^ power` for each input element `x`. - -**Parameters**: *Power* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *power* - - * **Description**: *power* is a parameter in the formula described above. - * **Range of values**: a floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *scale* - - * **Description**: *scale* is a parameter in the formula described above. - * **Range of values**: a floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *shift* - - * **Description**: *shift* is a parameter in the formula described above. - * **Range of values**: a floating-point number - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: Multidimensional input blob. Required. 
- -**Mathematical Formulation** - -\f[ -p = (shift + scale * x)^{power} -\f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## PReLU Layer -Back to top - -**Name**: *PReLU* - -**Category**: *Activation* - -**Short description**: *PReLU* is the Parametric Rectifier Linear Unit. The difference from *ReLU* is that negative slopes can vary across channels. - -**Parameters**: *PReLU* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *channel_shared* - - * **Description**: *channel_shared* specifies whether a negative slope is shared across channels or not. If the *channel_shared* is equal to 0, the slope shape is equal to the number of channels, if the *channel_shared* is equal to 1, the slope is scalar. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -**Inputs**: - -* **1**: 4D or 5D input blob. Required. - - -**Mathematical Formulation** - -*PReLU* accepts one input with four dimensions. The produced blob has the same dimensions as input. -*PReLU* does the following with the input blob: -\f[ -o_{i} = max(0, x_{i}) + w_{i} * min(0,x_{i}) -\f] -where \f$w_{i}\f$ is from weights blob. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## PriorBox Layer -Back to top - -**Name**: *PriorBox* - -**Category**: *Layer* - -**Short description**: *PriorBox* layer generates prior boxes of specified sizes and aspect ratios across all dimensions. - -**Parameters**: *PriorBox* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *min_size* - - * **Description**: *min_size* is the minimum box size (in pixels). For example, *min_size* equal to `[15.0]` means that the minimum box size is 15.0. 
- * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *yes* - -* **Parameter name**: *max_size* - - * **Description**: *max_size* is the maximum box size (in pixels). For example, *max_size* equal to `[15.0]` means that the maximum box size is 15.0. - * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *yes* - -* **Parameter name**: *aspect_ratio* - - * **Description**: *aspect_ratio* is a variance of aspect ratios. Duplicate values are ignored. For example, *aspect_ratio* equal to "[2.0,3.0]" means that for the first box, aspect ratio is 2.0, for the second box, it is 3.0. - * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *no* - -* **Parameter name**: *flip* - - * **Description**: *flip* is a flag that specifies whether each *aspect_ratio* is duplicated and flipped. For example, *flip* equal to 1 and *aspect_ratio* equal to "4.0,2.0" mean that *aspect_ratio* is equal to "4.0,2.0,0.25,0.5". - * **Range of values**: - * 0 - flip each *aspect_ratio* - * 1 - do not flip each *aspect_ratio* - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *clip* - - * **Description**: *clip* is a flag that specifies if each value in the output blob is clipped to *[0,1]* interval. - * **Range of values**: - * 0 - do not perform clipping - * 1 - clip each value in the output blob to *[0,1]* interval - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *step* - - * **Description**: *step* is the distance between box centers. For example, *step* equal to `85.0` means that the distance between neighborhood prior boxes centers is 85.0. 
- * **Range of values**: a non-negative floating-point number - * **Type**: `float` - * **Default value**: 0.0 - * **Required**: *yes* - -* **Parameter name**: *offset* - - * **Description**: *offset* is a shift of box respectively to top left corner. For example, *offset* equal to `85.0` means that the shift of neighborhood prior boxes centers is 85.0. - * **Range of values**: a non-negative floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *variance* - - * **Description**: *variance* is the variance of adjusting bounding boxes. The parameter can contain 0, 1 or 4 elements. - * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *yes* - -* **Parameter name**: *scale_all_sizes* - - * **Description**: *scale_all_sizes* is a flag that specifies the type of inference. For example, *scale_all_sizes* equal to 0 means that the *PriorBox* layer is inferred in MXNet-like manner, which means that the *max_size* parameter is ignored. - * **Range of values**: - * 0 - do not use *max_size* - * 1 - use *max_size* - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *fixed_ratio* - - * **Description**: *fixed_ratio* is an aspect ratio of a box. For example, *fixed_ratio* equal to 2.000000 means that the aspect ratio for the first box aspect ratio is 2. - * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: None - * **Required**: *no* - -* **Parameter name**: *fixed_size* - - * **Description**: *fixed_size* is an initial box size (in pixels). For example, *fixed_size* equal to 15 means that the initial box size is 15. 
- * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: None - * **Required**: *no* - -* **Parameter name**: *density* - - * **Description**: *density* is the square root of the number of boxes of each type. For example, *density* equal to 2 means that the first box generates four boxes of the same size and with the same shifted centers. - * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: None - * **Required**: *no* - -**Inputs**: - -* **1**: 4D input blob. Used to get height and width only. Required. - -* **2**: 4D input blob. Used to get image height and image width only. Required. - -**Mathematical Formulation**: - -*PriorBox* computes coordinates of prior boxes as follows: -1. Calculates *center_x* and *center_y* of prior box: - \f[ - W \equiv Width \quad Of \quad Image - \f] - \f[ - H \equiv Height \quad Of \quad Image - \f] - * If step equals 0: - \f[ - center_x=(w+0.5) - \f] - \f[ - center_y=(h+0.5) - \f] - * else: - \f[ - center_x=(w+offset)`step - \f] - \f[ - center_y=(h+offset)`step - \f] - \f[ - w \subset \left( 0, W \right ) - \f] - \f[ - h \subset \left( 0, H \right ) - \f] -2. For each \f$ s \subset \left( 0, min_sizes \right ) \f$, calculates coordinates of prior boxes: - \f[ - xmin = \frac{\frac{center_x - s}{2}}{W} - \f] - \f[ - ymin = \frac{\frac{center_y - s}{2}}{H} - \f] - \f[ - xmax = \frac{\frac{center_x + s}{2}}{W} - \f] - \f[ - ymax = \frac{\frac{center_y + s}{2}}{H} - \f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## PriorBoxClustered Layer -Back to top - -**Name**: *PriorBoxClustered* - -**Category**: *Layer* - -**Short description**: *PriorBoxClustered* layer generates prior boxes of specified sizes normalized to the input image size. - -**Parameters**: *PriorBoxClustered* layer parameters are specified in the `data` node, which is a child of the `layer` node. 
- -* **Parameter name**: *width* - - * **Description**: *width* specifies desired boxes widths in pixels. - * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *yes* - -* **Parameter name**: *height* - - * **Description**: *height* specifies desired boxes heights in pixels. - * **Range of values**: positive floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *yes* - -* **Parameter name**: *clip* - - * **Description**: *clip* is a flag that specifies if each value in the output blob is clipped within *[0,1]*. - * **Range of values**: - * 0 - do not perform clipping - * 1 - clip each value in the output blob to *[0,1]* - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *step (step_w, step_h)* - - * **Description**: *step (step_w, step_h)* is the distance between box centers. For example, *step* equal to 85.0 means that the distance between neighborhood prior boxes centers is 85.0. If both *step_h* and *step_w* are 0.0, they are updated with value of *step*. If after that they are still 0.0, they are calculated as input image heights/width divided by the first input heights/width. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: 0.0 - * **Required**: *yes* - -* **Parameter name**: *offset* - - * **Description**: *offset* is a shift of box respectively to top left corner. For example, *offset* equal to 85.0 means that the shift of neighborhood prior boxes centers is 85.0. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *variance* - - * **Description**: *variance* is the variance of adjusting bounding boxes. 
- * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *yes* - -* **Parameter name**: *img_h* - - * **Description**: *img_h* is the height of input image. It is calculated as the second input height unless provided explicitly. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: 1.0 - * **Required**: *yes* - -* **Parameter name**: *img_w* - - * **Description**: *img_w* is the width of input image. It is calculated as second input width unless provided explicitly. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: 1.0 - * **Required**: *yes* - -**Inputs**: - -* **1**: 4D input blob. Used to get height and width only. Required. - -* **2**: 4D input blob. Used to get image height and image width only. Required. - -**Mathematical Formulation** - -*PriorBoxClustered* computes coordinates of prior boxes as follows: -1. Calculates the *center_x* and *center_y* of prior box: - \f[ - W \equiv Width \quad Of \quad Image - \f] - \f[ - H \equiv Height \quad Of \quad Image - \f] - \f[ - center_x=(w+offset)`step - \f] - \f[ - center_y=(h+offset)`step - \f] - \f[ - w \subset \left( 0, W \right ) - \f] - \f[ - h \subset \left( 0, H \right ) - \f] -2. For each \f$s \subset \left( 0, W \right )\f$, calculates the prior boxes coordinates: - \f[ - xmin = \frac{center_x - \frac{width_s}{2}}{W} - \f] - \f[ - ymin = \frac{center_y - \frac{height_s}{2}}{H} - \f] - \f[ - xmax = \frac{center_x - \frac{width_s}{2}}{W} - \f] - \f[ - ymax = \frac{center_y - \frac{height_s}{2}}{H} - \f] -If *clip* is defined, the coordinates of prior boxes are recalculated with the formula: -\f$coordinate = \min(\max(coordinate,0), 1)\f$ - -**Example** - -```xml - - - - ... - - - ... 
- - -``` - -* * * - -## Proposal Layer -Back to top - -**Name**: *Proposal* - -**Category**: *Layer* - -**Short description**: *Proposal* layer filters bounding boxes and outputs only those with the highest prediction confidence. - -**Parameters**: *Proposal* layer parameters are specified in the `data` node, which is a child of the `layer` node. The layer has three inputs: a blob with probabilities whether particular bounding box corresponds to background and foreground, a blob with logits for each of the bounding boxes, a blob with input image size in the [`image_height`, `image_width`, `scale_height_and_width`] or [`image_height`, `image_width`, `scale_height`, `scale_width`] format. - -* **Parameter name**: *base_size* - - * **Description**: *base_size* is the size of the anchor to which *scale* and *ratio* parameters are applied. - * **Range of values**: a positive integer number - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *pre_nms_topn* - - * **Description**: *pre_nms_topn* is the number of bounding boxes before the NMS operation. For example, *pre_nms_topn* equal to 15 means that the minimum box size is 15. - * **Range of values**: a positive integer number - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *post_nms_topn* - - * **Description**: *post_nms_topn* is the number of bounding boxes after the NMS operation. For example, *post_nms_topn* equal to 15 means that the maximum box size is 15. - * **Range of values**: a positive integer number - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *nms_thresh* - - * **Description**: *nms_thresh* is the minimum value of the proposal to be taken into consideration. For example, *nms_thresh* equal to 0.5 means that all boxes with prediction probability less than 0.5 are filtered out. 
- * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *feat_stride* - - * **Description**: *feat_stride* is the step size to slide over boxes (in pixels). For example, *feat_stride* equal to 16 means that all boxes are analyzed with the slide 16. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *min_size* - - * **Description**: *min_size* is the minimum size of box to be taken into consideration. For example, *min_size* equal 35 means that all boxes with box size less than 35 are filtered out. - * **Range of values**: a positive integer number - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *ratio* - - * **Description**: *ratio* is the ratios for anchor generation. - * **Range of values**: a list of floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *yes* - -* **Parameter name**: *scale* - - * **Description**: *scale* is the scales for anchor generation. - * **Range of values**: a list of floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *yes* - -* **Parameter name**: *clip_before_nms* - - * **Description**: *clip_before_nms* flag that specifies whether to perform clip bounding boxes before non-maximum suppression or not. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *clip_after_nms* - - * **Description**: *clip_after_nms* is a flag that specifies whether to perform clip bounding boxes after non-maximum suppression or not. 
- * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *normalize* - - * **Description**: *normalize* is a flag that specifies whether to perform normalization of output boxes to *[0,1]* interval or not. - * **Range of values**: 0 or 1 - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *box_size_scale* - - * **Description**: *box_size_scale* specifies the scale factor applied to logits of box sizes before decoding. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: 1.0 - * **Required**: *no* - -* **Parameter name**: *box_coordinate_scale* - - * **Description**: *box_coordinate_scale* specifies the scale factor applied to logits of box coordinates before decoding. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: 1.0 - * **Required**: *no* - -* **Parameter name**: *framework* - - * **Description**: *framework* specifies how the box coordinates are calculated. - * **Range of values**: - * "" (empty string) - calculate box coordinates like in Caffe* - * *tensorflow* - calculate box coordinates like in the TensorFlow* Object Detection API models - * **Type**: string - * **Default value**: "" (empty string) - * **Required**: *no* - -* **Parameter name**: *for_deformable* - - * **Description**: *for_deformable* specifies how the box coordinates are calculated. - * **Range of values**: 0 or 1 - * **Type**: int - * **Default value**: 0 - * **Required**: *no* - -**Mathematical Formulation** - -*Proposal* layer accepts three inputs with four dimensions. The produced blob has two dimensions: the first one equals `batch_size * post_nms_topn`. -*Proposal* layer does the following with the input blob: -1. Generates initial anchor boxes. Left top corner of all boxes is at (0, 0). 
Width and height of boxes are calculated from *base_size* with *scale* and *ratio* parameters. -2. For each point in the first input blob: - * pins anchor boxes to the image according to the second input blob that contains four deltas for each box: for *x* and *y* of center, for *width* and for *height* - * finds out score in the first input blob -3. Filters out boxes with size less than *min_size* -4. Sorts all proposals (*box*, *score*) by score from highest to lowest -5. Takes top *pre_nms_topn* proposals -6. Calculates intersections for boxes and filters out all boxes with \f$intersection/union > nms\_thresh\f$ -7. Takes top *post_nms_topn* proposals -8. Returns top proposals - -**Inputs**: - -* **1**: 4D input blob with class prediction scores. Required. - -* **2**: 4D input blob with box logits. Required. - -* **3**: 1D input blob with 3 or 4 elements: [image height, image width, scale for image height/width OR scale for image height and scale for image width]. Required. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## PSROIPooling Layer -Back to top - -**Name**: *PSROIPooling* - -**Category**: *Pool* - -**Short description**: *PSROIPooling* layer computes position-sensitive pooling on regions of interest specified by input. - -**Detailed description**: [Reference](https://arxiv.org/pdf/1703.06211.pdf) - -**Parameters**: *PSROIPooling* layer parameters are specified in the `data` node, which is a child of the `layer` node. *PSROIPooling* layer takes two input blobs: with feature maps and with regions of interest (box coordinates). The latter is specified as five element tuples: *[batch_id, x_1, y_1, x_2, y_2]*. ROI coordinates are specified in absolute values for the average mode and in normalized values (to *[0,1]* interval) for bilinear interpolation. - -* **Parameter name**: *output_dim* - - * **Description**: *output_dim* is a pooled output channel number. 
- * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *group_size* - - * **Description**: *group_size* is the number of groups to encode position-sensitive score maps. Use for *average* mode only. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *spatial_scale* - - * **Description**: *spatial_scale* is a multiplicative spatial scale factor to translate ROI coordinates from their input scale to the scale used when pooling. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *mode* - * **Description**: *mode* specifies mode for pooling. - * **Range of values**: - * *average* - perform average pooling - * *bilinear* - perform pooling with bilinear interpolation - * **Type**: string - * **Default value**: *average* - * **Required**: *yes* - -* **Parameter name**: *spatial_bins_x* - * **Description**: *spatial_bins_x* specifies numbers of bins to divide the input feature maps over width. Used for "bilinear" mode only. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *spatial_bins_y* - * **Description**: *spatial_bins_y* specifies numbers of bins to divide the input feature maps over height. Used for *bilinear* mode only. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -**Inputs**: - -* **1**: 4D input blob with feature maps. Required. - -* **2**: 2D input blob describing box consisting of five element tuples: `[batch_id, x_1, y_1, x_2, y_2]`. Required. 
- -**Example** - -```xml - - - - - 1 - 3240 - 38 - 38 - - - 100 - 5 - - - - - 100 - 360 - 6 - 6 - - - -``` - -* * * - -## FakeQuantize Layer -Back to top - -**Name**: *FakeQuantize* - -**Category**: *Layer* - -**Short description**: *FakeQuantize* layer is element-wise linear quantization of floating-point input values into a discrete set of floating-point values. - -**Detailed description**: Input and output ranges as well as the number of levels of quantization are specified by dedicated inputs and attributes. There can be different limits for each element or groups of elements (channels) of the input blobs. Otherwise, one limit applies to all elements. It depends on shape of inputs that specify limits and regular broadcasting rules applied for input blobs. The output of the operator is a floating-point number of the same type as the input blob. In general, there are four values that specify quantization for each element: *input_low*, *input_high*, *output_low*, *output_high*. *input_low* and *input_high* parameters specify the input range of quantization. All input values that are outside this range are clipped to the range before actual quantization. *output_low* and *output_high* specify minimum and maximum quantized values at the output. - -**Parameters**: *Quantize* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *levels* - - * **Description**: *levels* is the number of quantization levels. - * **Range of values**: an integer greater than or equal to 2 - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: `X` - multidimensional input blob to quantize. Required. - -* **2**: `input_low` - minimum limit for input value. The shape must be broadcastable to the shape of `X`. Required. - -* **3**: `input_high` - maximum limit for input value. Can be the same as `input_low` for binarization. The shape must be broadcastable to the shape of `X`. Required. 
- -* **4**: `output_low` - minimum quantized value. The shape must be broadcastable to the shape of `X`. Required. - -* **5**: `output_high` - maximum quantized value. The shape must be broadcastable to the shape of `X`. Required. - -**Mathematical Formulation** - -Each element of the output is defined as the result of the following expression: - -```python -if x <= input_low: - output = output_low -elif x > input_high: - output = output_high -else: - # input_low < x <= input_high - output = round((x - input_low) / (input_high - input_low) * (levels-1)) / (levels-1) * (output_high - output_low) + output_low -``` - -**Example** -```xml - - - - - 1 - 64 - 56 - 56 - - - 1 - 64 - 1 - 1 - - - 1 - 64 - 1 - 1 - - - 1 - 1 - 1 - 1 - - - 1 - 1 - 1 - 1 - - - - - 1 - 64 - 56 - 56 - - - -``` - -* * * - -## Range Layer -Back to top - -**Name**: *Range* - -**Category**: *Layer* - -**Short description**: *Range* generates a sequence of numbers according to input values. - -**Detailed description**: *Range* layer generates a sequence of numbers starting from the value in the first input up to but not including the value in the second input with a step equal to the value in the third input. - -**Parameters**: *Range* layer does not have parameters. - -**Inputs**: - -* **1**: 0D blob (constant) with the start value of the range. Required. - -* **2**: 0D blob (constant) with the limit value of the range. Required. - -* **3**: 0D blob (constant) with the step value. Required. - -**Example** - -```xml - - - - - - - - - 10 - - - -``` - -* * * - -## RegionYolo Layer -Back to top - -**Name**: *RegionYolo* - -**Category**: *Layer* - -**Short description**: *RegionYolo* computes the coordinates of regions with probability for each class. - -**Detailed description**: [Reference](https://arxiv.org/pdf/1612.08242.pdf) - -**Parameters**: *RegionYolo* layer parameters are specified in the `data` node, which is a child of the `layer` node. 
- -* **Parameter name**: *coords* - - * **Description**: *coords* is the number of coordinates for each region. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *classes* - - * **Description**: *classes* is the number of classes for each region. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *num* - - * **Description**: *num* is the number of regions. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *do_softmax* - - * **Description**: *do_softmax* is a flag that specifies the inference method and affects how the number of regions is determined. - * **Range of values**: - * *0* - do not perform softmax - * *1* - perform softmax - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Parameter name**: *mask* - - * **Description**: *mask* specifies the number of regions. Use this parameter instead of *num* when *do_softmax* is equal to 0. - * **Range of values**: a list of integers - * **Type**: `int[]` - * **Default value**: `[]` - * **Required**: *no* - -**Inputs**: - -* **1**: 4D input blob. Required. - -**Example** - -```xml - - - ... - ... - - -``` - -* * * - -## ReLU Layer -Back to top - -**Name**: *ReLU* - -**Category**: *Activation* - -**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/relu.html) - -**Detailed description**: [Reference](https://github.com/Kulbear/deep-learning-nano-foundation/wiki/ReLU-and-Softmax-Activation-Functions#rectified-linear-units) - -**Parameters**: *ReLU* layer parameters are specified parameters in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *negative_slope* - - * **Description**: *negative_slope* is a multiplier, which is used if the unit is not active (that is, negative). 
For example, *negative_slope* equal to 0.1 means that an inactive unit value would be multiplied by 0.1 and this is the [Leaky ReLU](https://keras.io/layers/advanced-activations/#leakyrelu). If *negative_slope* is equal to 0.0, this is the usual *ReLU*. - * **Range of values**: a non-negative floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *no* - -**Mathematical Formulation** - -\f[ -Y_{i}^{( l )} = max(0, Y_{i}^{( l - 1 )}) -\f] - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## ReorgYolo Layer -Back to top - -**Name**: *ReorgYolo* - -**Category**: *Layer* - -**Short description**: *ReorgYolo* reorganizes input blob taking into account strides. - -**Detailed description**: [Reference](https://arxiv.org/pdf/1612.08242.pdf) - -**Parameters**: *ReorgYolo* layer parameters are specified parameters in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *stride* - - * **Description**: *stride* is the distance between cut throws in output blobs. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: 4D input blob. Required. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Resample (Type 1) Layer -Back to top - -**Name**: *Resample* - -**Category**: *Layer* - -**Short description**: *Resample* layer scales the input blob by the specified parameters. - -**Parameters**: *Resample* layer parameters are specified in the `data` node, which is a child of the `layer` node. *Resample* **Type 1** layer has one input blob containing image to resample. - -* **Parameter name**: *type* - - * **Description**: *type* parameter specifies the type of blob interpolation. 
- * **Range of values**: - * *caffe.ResampleParameter.LINEAR* - linear blob interpolation - * *caffe.ResampleParameter.NEAREST* - nearest-neighbor blob interpolation - * **Type**: string - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *antialias* - - * **Description**: *antialias* is a flag that specifies whether to perform anti-aliasing. - * **Range of values**: - * 0 - do not perform anti-aliasing - * 1 - perform anti-aliasing - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *factor* - - * **Description**: *factor* specifies a scale factor for output height and width. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: 4D input blob. Required. - -**Example** - -```xml - - - - - 1 - 3 - 25 - 30 - - - - - 1 - 3 - 50 - 60 - - -​ -``` - -* * * - -## Resample (Type 2) Layer -Back to top - -**Name**: *Resample* - -**Category**: *Layer* - -**Short description**: *Resample* layer scales the input blob by the specified parameters. - -**Parameters**: *Resample* layer parameters are specified in the `data` node, which is a child of the `layer` node. *Resample* **Type 2** layer has two input blobs containing image to resample and output dimensions. - -* **Parameter name**: *type* - - * **Description**: *type* parameter specifies the type of blob interpolation. - * **Range of values**: - * *caffe.ResampleParameter.LINEAR* - linear blob interpolation - * *caffe.ResampleParameter.NEAREST* - nearest-neighbor blob interpolation - * **Type**: string - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *antialias* - - * **Description**: *antialias* is a flag that specifies whether to perform anti-aliasing. 
- * **Range of values**: - * 0 - do not perform anti-aliasing - * 1 - perform anti-aliasing - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *factor* - - * **Description**: *factor* parameter is ignored in the *Resample* **Type 2**. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: 4D input blob. Required. - -* **2**: 1D blob describing output shape. Required. - -**Example** - -```xml - - - - - 1 - 3 - 25 - 30 - - - 4 - - - - - 1 - 3 - 50 - 60 - - -​ -``` - -* * * - -## Reshape Layer -Back to top - -**Name**: *Reshape* - -**Category**: *Layer* - -**Short description**: *Reshape* layer changes dimensions of the input blob according to the specified order. Input blob volume is equal to output blob volume, where volume is the product of dimensions. - -**Detailed description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/reshape.html) - -**Parameters**: *Reshape* layer does not have parameters. *Reshape* layer takes two input blobs: the blob to be resized and the output blob shape. The values in the second blob can be -1, 0 and any positive integer number. The two special values -1 and 0: - * 0 means copying the respective dimension of the input blob. - * -1 means that this dimension is calculated to keep the overall elements count the same as in the input blob. No more than one `-1` can be used in a reshape operation. - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -* **2**: 1D blob describing output shape. Required. - -**Example** - -```xml - - - - 2 - 5 - 5 - 24 - - - 3 - - - - - 2 - 150 - 4 - - - -``` - -* * * - -## ReverseSequence Layer -Back to top - -**Name**: *ReverseSequence* - -**Category**: *Layer* - -**Short description**: *ReverseSequence* reverses variable length slices of data. 
- -**Detailed description**: *ReverseSequence* slices input along the dimension specified in the *batch_axis*, and for each slice *i*, reverses the first *lengths[i]* (the second input) elements along the dimension specified in the *seq_axis*. - -**Parameters**: *ReverseSequence* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *batch_axis* - - * **Description**: *batch_axis* is the index of the batch dimension. - * **Range of values**: an integer. Can be negative. - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - -* **Parameter name**: *seq_axis* - - * **Description**: *seq_axis* is the index of the sequence dimension. - * **Range of values**: an integer. Can be negative. - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -**Inputs**: - -* **1**: Blob with input data to reverse. Required. - -* **2**: 1D blob with sequence lengths in the first input blob. Required. - -**Example** - -```xml - - - - - 3 - 10 - 100 - 200 - - - 10 - - - - - 3 - 10 - 100 - 200 - - - -``` - -* * * -## RNNCell Layer -Back to top - -**Name**: *RNNCell* - -**Category**: *Layer* - -**Short description**: *RNNCell* layer computes the output using the formula described in the [article](https://hackernoon.com/understanding-architecture-of-lstm-cell-from-scratch-with-code-8da40f0b71f4). - -**Parameters**: *RNNCell* layer parameters should be specified as the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *hidden_size* - - * **Description**: *hidden_size* specifies hidden state size. 
- * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *activations* - - * **Description**: activation functions for gates - * **Range of values**: any combination of *relu*, *sigmoid*, *tanh* - * **Type**: a list of strings - * **Default value**: *sigmoid,tanh* - * **Required**: *no* - -* **Parameter name**: *activations_alpha, activations_beta* - - * **Description**: *activations_alpha, activations_beta* functions parameters - * **Range of values**: a list of floating-point numbers - * **Type**: `float[]` - * **Default value**: None - * **Required**: *no* - -* **Parameter name**: *clip* - - * **Description**: *clip* specifies value for tensor clipping to be in *[-C, C]* before activations - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *no* - -**Inputs** - -* **1**: `X` - 2D ([batch_size, input_size]) input data. Required. - -* **2**: `Hi` - 2D ([batch_size, hidden_size]) input hidden state data. Required. - -**Outputs** - -* **1**: `Ho` - 2D ([batch_size, hidden_size]) output hidden state. - -* * * - -## ROIPooling Layer -Back to top - -**Name**: *ROIPooling* - -**Category**: *Pool* - -**Short description**: *ROIPooling* is a *pooling layer* used over feature maps of non-uniform input sizes and outputs a feature map of a fixed size. - -**Detailed description**: [deepsense.io reference](https://blog.deepsense.ai/region-of-interest-pooling-explained/) - -**Parameters**: *ROIPooling* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *pooled_h* - - * **Description**: *pooled_h* is the height of the ROI output feature map. For example, *pooled_h* equal to 6 means that the height of the output of *ROIPooling* is 6. 
- * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *pooled_w* - - * **Description**: *pooled_w* is the width of the ROI output feature map. For example, *pooled_w* equal to 6 means that the width of the output of *ROIPooling* is 6. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *spatial_scale* - - * **Description**: *spatial_scale* is the ratio of the input feature map over the input image size. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *method* - - * **Description**: *method* specifies a method to perform pooling. If the method is *bilinear*, the input box coordinates are normalized to the [0,1] interval. - * **Range of values**: *max* or *bilinear* - * **Type**: string - * **Default value**: *max* - * **Required**: *no* - -**Inputs**: - -* **1**: 4D input blob with feature maps. Required. - -* **2**: 2D input blob describing box consisting of 5 element tuples: [batch_id, x_1, y_1, x_2, y_2]. Required. - -**Mathematical Formulation** - -\f[ -output_{j} = MAX\{ x_{0}, ... x_{i}\} -\f] - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## ExperimentalDetectronROIFeatureExtractor Layer -Back to top - -**Name**: *ExperimentalDetectronROIFeatureExtractor* (aka *ROIAlignPyramid*) - -**Category**: *Pool* - -**Short description**: *ExperimentalDetectronROIFeatureExtractor* is the *ROIAlign* operation applied over a feature pyramid. - -**Detailed description**: *ExperimentalDetectronROIFeatureExtractor* maps input ROIs to the levels of the pyramid depending on the sizes of ROIs and parameters of the operation, and then extracts features via *ROIAlign* from corresponding pyramid levels. 
-For more details please see the math formulas below and the following sources: - - * [Feature Pyramid Networks for Object Detection](https://arxiv.org/pdf/1612.03144.pdf) - * [Facebook AI / detectron](https://ai.facebook.com/tools/detectron/) - * [ONNX / ROI Align](https://github.com/onnx/onnx/blob/rel-1.5.0/docs/Operators.md#RoiAlign) - * [NNEF / ROI Align](https://www.khronos.org/registry/NNEF/specs/1.0/nnef-1.0.2.html#roi-resize) - -**Parameters**: *ExperimentalDetectronROIFeatureExtractor* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *output_size* - - * **Description**: *output_size* is the width and height of the output tensor. - * **Range of values**: a positive integer number - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *sampling_ratio* - - * **Description**: *sampling_ratio* is the number of sampling points per the output value. If 0, then use adaptive number computed as `ceil(roi_width / output_width)`, and likewise for height. - * **Range of values**: a non-negative integer number - * **Type**: `int` - * **Default value**: 0 - * **Required**: *yes* - -* **Parameter name**: *pyramid_scales* - - * **Description**: *pyramid_scales* enlists `image_size / layer_size[l]` ratios for pyramid layers `l=1,...,L`, where `L` is the number of pyramid layers, and `image_size` refers to network's input image. Note that pyramid's largest layer may have smaller size than input image, e.g. `image_size` is 640 in the XML example below. - * **Range of values**: a list of positive integer numbers - * **Type**: `int[]` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **0**: 2D input blob describing the rois as 4-tuples: [x1, y1, x2, y2]. Batch size is the number of rois. Coordinates *x* and *y* are `float` numbers and refer to the input *image_size*. Required. - -* **1**, ..., **L**: Pyramid of 4D input blobs with feature maps. 
Batch size must be 1. The number of channels must be the same for all layers of the pyramid. The layer width and height must equal to the `layer_size[l] = image_size / pyramid_scales[l]`. Required. - -**Outputs**: - -* **0**: 4D output blob. Batch size equals to number of rois. -Channels number is the same as for all images in the input pyramid. -Data type is `float`. Required. - -**Mathematical Formulation** - -*ExperimentalDetectronROIFeatureExtractor* applies the *ROIAlign* algorithm to the pyramid layers: - -* output[i, :, :, :] = ROIAlign(inputPyramid[j], rois[i]) -* j = PyramidLevelMapper(rois[i]) - -PyramidLevelMapper maps the ROI to the pyramid level using the following formula: - -* j = floor(2 + log2(sqrt(w * h) / 224) - -Here 224 is the "canonical" size, 2 is the pyramid starting level, and w, h are the ROI width and height. - -**Example** - -```xml - - - - - 100 - 4 - - - 1 - 256 - 160 - 160 - - - 1 - 256 - 80 - 80 - - - 1 - 256 - 40 - 40 - - - 1 - 256 - 20 - 20 - - - - - 100 - 256 - 14 - 14 - - - -``` - -* * * - -## ExperimentalSparseWeightedSum Layer -Back to top - -**Name**: *ExperimentalSparseWeightedSum* - -**Category**: *Layer* - -**Short description**: *ExperimentalSparseWeightedSum* extracts embedding vectors from the parameters table for each object feature value and sum up these embedding vectors multiplied by weights for each object. - -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/python/tf/nn/embedding_lookup_sparse). This is similar to *embedding_lookup_sparse* but it accepts objects with empty feature values for which it uses a default value to extract an embedding from the parameters table. In comparison with *embedding_lookup_sparse* it has a limitation to work only with two-dimensional indices tensor. - -**Inputs**: - -* **1**: 2-D tensor. Input indices of the sparse tensor. It contains with an integer type. Required. -* **2**: 1-D tensor. Input values of the sparse tensor. It contains with an integer type. 
Required. -* **3**: 1-D tensor. Dense shape of the sparse tensor. It contains values of an integer type. Required. -* **4**: N-D tensor. The parameters table. It contains values of a float type. Required. -* **5**: 0-D tensor. The default value. It contains values of an integer type. Required. -* **6**: 1-D tensor. Input weights. It contains values of a float type. Optional. - -**Outputs**: - -* **1**: The output tensor of resulting embedding vectors for each object. It has a shape [batch_size, params_table_shape[1], ..., params_table_shape[-1]] where batch_size is the number of objects or the number of rows in the sparse tensor. - -* * * - -## ScaleShift Layer -Back to top - -**Name**: *ScaleShift* - -**Category**: *Layer* - -**Short description**: *ScaleShift* layer performs linear transformation of the input blobs. Weights denote a scaling parameter, biases denote a shift. - -**Parameters**: *ScaleShift* layer does not have parameters. - -**Inputs**: - -* **1**: 4D input blob. Required. - -**Mathematical Formulation** - -\f[ -o_{i} =\gamma b_{i} + \beta -\f] - -**Example** - -``` - - ... - ... - -``` - -* * * - -## Select Layer -Back to top -**Name**: *Select* - -**Category**: *Layer* - -**Short description**: *Select* layer returns a tensor filled with the elements from the second or the third input, depending on the condition (the first input) value. - -**Detailed description**: *Select* takes elements from the second (`then`) or the third (`else`) input based on a condition mask - provided in the first input (`cond`). The `cond` tensor is broadcasted to `then` and `else` tensors. The output tensor shape is equal - to the broadcasted shape of `cond`, `then`, and `else`. The behavior is similar to [numpy.where](https://docs.scipy.org/doc/numpy/reference/generated/numpy.where.html) with three parameters. - -**Parameters**: *Select* layer does not have parameters. - -**Inputs**: -* **1**: `cond` tensor with selection mask (only integer values). The tensor can be 0D. 
-* **2**: `then` the tensor with elements to take where condition is true. -* **3**: `else` the tensor with elements to take where condition is false. - -**Example** - -```xml - - - - 3 - 2 - - - 3 - 2 - - - 3 - 2 - - - - - 3 - 2 - - - -``` - -* * * - -## Shape Layer -Back to top - -**Name**: *Shape* - -**Category**: *Layer* - -**Short description**: *Shape* produces a blob with the input blob shape. - -**Parameters**: *Shape* layer does not have parameters. - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -**Example** - -```xml - - - - 2 - 3 - 224 - 224 - - - - - 4 - - - -``` - -* * * - -## ShuffleChannels Layer -Back to top - -**Name**: *ShuffleChannels* - -**Category**: *Layer* - -**Short description**: *ShuffleChannels* permutes data in the channel dimension of the input blob. - -**Parameters**: *ShuffleChannels* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* specifies the index of a channel dimension. - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *No* - -* **Parameter name**: *group* - - * **Description**: *group* specifies the number of groups to split the channel dimension into. This number must evenly divide the channel dimension size. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *No* - -**Inputs**: - -* **1**: 4D input blob. Required. - -**Mathematical Formulation** - -The operation is the equivalent with the following transformation of the input blob *x* of shape *[N, C, H, W]*: - -``` -x' = reshape(x, [N, group, C / group, H * W]) -x'' = transpose(x', [0, 2, 1, 3]) -y = reshape(x'', [N, C, H, W]) -``` - -where *group* is the layer parameter described above. 
- -**Example** - -```xml - - - - - 3 - 12 - 200 - 400 - - - - - 3 - 12 - 200 - 400 - - - -``` - -* * * - -## SimplerNMS Layer -Back to top - -**Name**: *SimplerNMS* - -**Category**: *Layer* - -**Short description**: *SimplerNMS* layer filters bounding boxes and outputs only those with the highest confidence of prediction. - -**Parameters**: *SimplerNMS* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *pre_nms_topn* - - * **Description**: *pre_nms_topn* is the number of bounding boxes before the NMS operation. For example, *pre_nms_topn* equal to 15 means that the minimum box size is 15. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *post_nms_topn* - - * **Description**: *post_nms_topn* is the quantity of bounding boxes after the NMS operation. For example, *post_nms_topn* equal to 15 means that the maximum box size is 15. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *iou_threshold* - - * **Description**: *iou_threshold* is the minimum ratio of boxes overlapping to be taken into consideration. For example, *iou_threshold* equal to 0.7 means that all boxes with overlapping ratio less than 0.7 are filtered out. - * **Range of values**: a positive floating-point number - * **Type**: `float` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *feat_stride* - - * **Description**: *feat_stride* is the step size to slide over boxes (in pixels). For example, *feat_stride* equal to 16 means that all boxes are analyzed with the slide 16. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *min_bbox_size* - - * **Description**: *min_bbox_size* is the minimum size of a box to be taken into consideration. 
- * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *scale* - - * **Description**: *scale* is for generating anchor boxes. - * **Range of values**: a list of positive floating-point numbers - * **Type**: `float[]` - * **Default value**: `[]` - * **Required**: *no* - -**Inputs**: - -* **1**: 4D input blob with class prediction scores. Required. - -* **2**: 4D input blob with box logits. Required. - -* **3**: 1D input blob 3 or 4 elements: [image height, image width, scale for image height/width OR scale for image height and scale for image width]. Required. - -**Mathematical Formulation** - -*SimplerNMS* accepts three inputs with four dimensions. Produced blob has two dimensions, the first one equals *post_nms_topn*. -*SimplerNMS* does the following with the input blob: -1. Generates initial anchor boxes. Left top corner of all boxes is (0, 0). Width and height of boxes are calculated based on scaled (according to the scale parameter) default widths and heights -2. For each point in the first input blob: - * pins anchor boxes to a picture according to the second input blob, which contains four deltas for each box: for `x` and `y` of the center, for width, and for height - * finds out score in the first input blob -3. Filters out boxes with size less than *min_bbox_size.* -4. Sorts all proposals (*box, score*) by score from highest to lowest -5. Takes top *pre_nms_topn* proposals -6. Calculates intersections for boxes and filters out all with \f$intersection/union > iou\_threshold\f$ -7. Takes top *post_nms_topn* proposals -8. Returns top proposals - -**Example** - -```xml - - - ... - ... - -``` - -* * * - -## Slice Layer -Back to top - -**Name**: *Slice* - -**Category**: *Layer* - -**Short description**: *Slice* layer splits the input blob into several pieces over the specified axis. 
- -**Parameters**: *Slice* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* specifies the axis to split the input blob along - * **Range of values**: a non-negative integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -**Example** - -```xml - - - - - 1 - 1048 - 14 - 14 - - - - - 1 - 1024 - 14 - 14 - - - 1 - 24 - 14 - 14 - - - -``` - -* * * - -## SoftMax Layer -Back to top - -**Name**: *SoftMax* - -**Category**: *Activation* - -**Short description**: [Reference](https://github.com/Kulbear/deep-learning-nano-foundation/wiki/ReLU-and-Softmax-Activation-Functions#softmax) - -**Detailed description**: [Reference](http://cs231n.github.io/linear-classify/#softmax) - -**Parameters**: *SoftMax* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* is the axis along which the *SoftMax* is calculated. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -**Mathematical Formulation** - -\f[ -y_{c} = \frac{e^{Z_{c}}}{\sum_{d=1}^{C}e^{Z_{d}}} -\f] -where \f$C\f$ is a number of classes - -**Example** - -```xml - - - ... - ... - -``` - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -* * * - -## SparseFillEmptyRows Layer -Back to top - -**Name**: *SparseFillEmptyRows* - -**Category**: *Layer* - -**Short description**: *SparseFillEmptyRows* fills empty rows in the input 2-D SparseTensor with a default value. - -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/sparse-fill-empty-rows) - -**Inputs**: - -* **1**: 2-D tensor. Input indices of the sparse tensor. Required. -* **2**: 1-D tensor. Input values of the sparse tensor. Required. -* **3**: 1-D tensor. 
Shape of the sparse tensor. Value of this input is required for the Model Optimizer. -* **4**: 0-D tensor. Default value to insert at rows missing from the input sparse tensor. Required. - -**Outputs**: - -* **1**: 2-D tensor. Indices of the filled sparse tensor. -* **2**: 1-D tensor. Values of the filled sparse tensor. -* **3**: 1-D tensor. An indicator of whether the dense row was missing in the input sparse tensor. - -* * * - -## SparseSegmentMean Layer -Back to top - -**Name**: *SparseSegmentMean* - -**Category**: *Layer* - -**Short description**: *SparseSegmentMean* computes the mean along sparse segments of a tensor. - -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/sparse-segment-mean) - -**Parameters**: *SparseSegmentMean* layer does not have parameters. - -**Inputs**: - -* **1**: ND tensor. Data tensor from which rows are selected for the mean operation. Required. -* **2**: 1D tensor. Tensor of rows indices selected from the first input tensor along 0 dimension. Required. -* **3**: 1D tensor. Tensor of segment IDs that rows selected for the operation belong to. Rows belonging to the same segment are summed up and divided by N, where N is a number of selected rows in a segment. This input has the same size as the second input. Values must be sorted in ascending order and can be repeated. Required. - -**Outputs**: - -* **1**: ND tensor. It has the same shape as the data tensor, except for dimension 0, which has a size equal to a size of an indices tensor. - -* * * - -## SparseSegmentSqrtN Layer -Back to top - -**Name**: *SparseSegmentSqrtN* - -**Category**: *Layer* - -**Short description**: *SparseSegmentSqrtN* computes the sum along sparse segments of a tensor and divides it by the square root of N, where N is a number of rows in a segment. 
- -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/sparse-segment-sqrt-n) - -**Parameters**: *SparseSegmentSqrtN* layer does not have parameters. - -**Inputs**: - -* **1**: ND tensor. Data tensor from which rows are selected. Required. -* **2**: 1D tensor. Tensor of rows indices selected from the first input tensor along 0 dimension. Required. -* **3**: 1D tensor. Tensor of segment IDs that selected rows belong to. Rows belonging to the same segment are summed up and divided by the square root of N, where N is a number of rows in a segment. This input tensor has the same size as the second input. Values must be sorted in ascending order and can be repeated. Required. - -**Outputs**: - -* **1**: ND tensor. It has the same shape as the data tensor, except for a dimension 0, which has a size equal to a size of an indices tensor. - -* * * - -## SparseSegmentSum Layer -Back to top - -**Name**: *SparseSegmentSum* - -**Category**: *Layer* - -**Short description**: *SparseSegmentSum* computes the sum along sparse segments of a tensor. - -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/sparse-segment-sum) - -**Parameters**: *SparseSegmentSum* layer does not have parameters. - -**Inputs**: - -* **1**: ND tensor. Data tensor from which rows are selected. Required. -* **2**: 1D tensor. Tensor of rows indices selected from the first input tensor along 0 dimension. Required. -* **3**: 1D tensor. Tensor of segment IDs that selected rows belong to. Rows belonging to the same segment are summed up. This input tensor has the same size as the second input. Values must be sorted in ascending order and can be repeated. Required. - -**Outputs**: - -* **1**: ND tensor. It has the same shape as the data tensor, except for a dimension 0, which has a size equal to a size of an indices tensor. 
- -* * * - -## SparseToDense Layer -Back to top - -**Name**: *SparseToDense* - -**Category**: *Layer* - -**Short description**: *SparseToDense* converts a sparse tensor into a dense tensor. - -**Detailed description**: [Reference](https://www.tensorflow.org/api_docs/python/tf/sparse/to_dense) - -**Inputs**: - -* **1**: 2-D tensor. Input indices of the sparse tensor. It contains with an integer type. Required. -* **2**: 1-D tensor. Dense shape of the sparse tensor. It contains with an integer type. Required. -* **3**: 1-D tensor. Input values of the sparse tensor. It contains with integer and float types. Required. -* **4**: 0-D tensor. Default value to insert at missing positions. The fourth input type must be the same as the third input type. If it is not specified, zero value is used. Optional. - -**Outputs**: - -* **1**: The output dense tensor. The output tensor shape is equal to a value of the second input. - -* * * - -## Split Layer -Back to top - -**Name**: *Split* - -**Category**: *Layer* - -**Short description**: *Split* layer splits the input along the specified axis into several output pieces. - -**Detailed description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/split.html) - -**Parameters**: *Split* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* is the number of the axis to split input blob along. - * **Range of values**: a non-negative integer less than the number of dimensions in the input - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *num_split* - - * **Description**: *num_split* is the number of pieces to split the input into. The *num_split* must evenly divide the size of the *axis* dimension. 
- * **Range of values**: a positive integer less than or equal to the size of the dimension being split over - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Mathematical Formulation** - -For example, if the blob is *BxC+CxHxW*, `axis="1"`, and `num_split="2"`, the sizes of output blobs are *BxCxHxW*. - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -**Example** - -```xml - - - ... - ... - -``` - -* * * - - -## Squeeze Layer - -**Name**: *Squeeze* - -**Category**: *Layer* - -**Short description**: *Squeeze* removes specified dimensions (second input) equal to 1 of the first input tensor. If the second input is omitted then all dimensions equal to 1 are removed. If the specified dimension is not equal to one then error is raised. - -**Parameters**: *Squeeze* layer doesn't have parameters. - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -* **2**: `(optional)`: 0D or 1D tensor with dimensions indices to squeeze. Values could be negative. Indices could be integer or float values. - -**Example** - -*Example 1:* -```xml - - - - 1 - 3 - 1 - 2 - - - - - 2 - - - - - 3 - 2 - - - -``` - -*Example 2: squeeze 1D tensor with 1 element to a 0D tensor (constant)* -```xml - - - - 1 - - - - - 1 - - - - - - - -``` - - -* * * - -## StridedSlice Layer - -**Name**: *StridedSlice* - -**Short description**: *StridedSlice* layer extracts a strided slice of a blob. - It is similar to generalized array indexing in Python\*. - -**Parameters**: *StridedSlice* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *begin_mask* - - * **Description**: *begin_mask* is a bit mask. *begin_mask[i]* equal to 0 means that the corresponding dimension of the `begin` input is ignored. 
- * **Range of values**: a list of `0`s and `1`s - * **Type**: `int[]` - * **Default value**: `[1]` - * **Required**: *yes* - -* **Parameter name**: *end_mask* - - * **Description**: *end_mask* is a bit mask. If *end_mask[i]* is 0, the corresponding dimension of the `end` input is ignored. - * **Range of values**: a list of `0`s and `1`s - * **Type**: `int[]` - * **Default value**: `[1]` - * **Required**: *yes* - -* **Parameter name**: *new_axis_mask* - - * **Description**: *new_axis_mask* is a bit mask. If *new_axis_mask[i]* is 1, a length 1 dimension is inserted on the `i`-th position of input blob. - * **Range of values**: a list of `0`s and `1`s - * **Type**: `int[]` - * **Default value**: `[0]` - * **Required**: *no* - - -* **Parameter name**: *shrink_axis_mask* - - * **Description**: *shrink_axis_mask* is a bit mask. If *shrink_axis_mask[i]* is 1, the dimension on the `i`-th position is deleted. - * **Range of values**: a list of `0`s and `1`s - * **Type**: `int[]` - * **Default value**: `[0]` - * **Required**: *no* - -* **Parameter name**: *ellipsis_mask* - - * **Description**: *ellipsis_mask* is a bit mask. It inserts missing dimensions on a position of a non-zero bit. - * **Range of values**: a list of `0`s and `1`. Only one non-zero bit is allowed. - * **Type**: `int[]` - * **Default value**: `[0]` - * **Required**: *no* - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -* **2**: `begin` input - 1D input blob with begin indexes for input blob slicing. Required. - Out-of-bounds values are silently clamped. If `begin_mask[i]` is 0, the value of `begin[i]` is ignored - and the range of the appropriate dimension starts from 0. - Negative values mean indexing starts from the end. For example, if `foo=[1,2,3]`, `begin[0]=-1` means `begin[0]=3`. - -* **3**: `end` input - 1D input blob with end indexes for input blob slicing. Required. - Out-of-bounds values will be silently clamped. 
If `end_mask[i]` is 0, the value of `end[i]` is ignored - and the full range of the appropriate dimension is used instead. - Negative values mean indexing starts from the end. For example, if `foo=[1,2,3]`, `end[0]=-1` means `end[0]=3`. - -* **4**: `stride` input - 1D input blob with strides. Optional. - -**Example** -```xml - - - - - 1 - 2 - 384 - 640 - 8 - - - 5 - - - 5 - - - 5 - - - - - 1 - 384 - 640 - 8 - - - -``` - - -* * * - - -## TensorIterator Layer -Back to top - -**Name**: *TensorIterator* - -**Category**: *Layer* - -**Short description**: *TensorIterator* (TI) layer performs recurrent sub-graph execution iterating through the data. - -**Parameters**: The parameters are specified in the child nodes of the `port_map` and `back_edges` sections, which are child nodes of the layer node. The `port_map` and `back_edges` sections specify data mapping rules. - -* **Node**: *port_map* is a set of rules to map input/output data blobs of the `TensorIterator` layer onto `body` data blobs. Port mapping rule is presented as `input`/`output` nodes. - - * **Parameter name**: *external_port_id* - - * **Description**: *external_port_id* is a port ID of the `TensorIterator` layer. - * **Range of values**: indexes of the *TensorIterator* outputs - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - - * **Parameter name**: *internal_layer_id* - - * **Description**: *internal_layer_id* is a layer ID inside the `body` sub-network to map to. - * **Range of values**: IDs of the layers inside in the *TensorIterator* layer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - - * **Parameter name**: *internal_port_id* - - * **Description**: *internal_port_id* is a port ID of the `body` layer to map to. - * **Range of values**: indexes of the `body` layer input - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - - * **Parameter name**: *axis* - - * **Description**: *axis* is an axis to iterate through. 
`-1` means no iteration is done. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: -1 - * **Required**: *no* - - * **Parameter name**: *start* - - * **Description**: *start* is an index where the iteration starts from. Negative value means counting indexes from the end. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: 0 - * **Required**: *no* - - * **Parameter name**: *end* - - * **Description**: *end* is an index where iteration ends. Negative value means counting indexes from the end. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: -1 - * **Required**: *no* - - * **Parameter name**: *stride* - - * **Description**: *stride* is a step of iteration. Negative value means backward iteration. - * **Range of values**: an integer - * **Type**: `int` - * **Default value**: 1 - * **Required**: *no* - -* **Node**: *back_edges* is a set of rules to transfer data blobs between `body` iteration. Mapping rule is presented as a general `edge` node with port and layer indexes of `body` sub-network. - - * **Parameter name**: *from-layer* - - * **Description**: *from-layer* is a layer ID inside the `body` sub-network. - * **Range of values**: IDs of the layers inside the *TensorIterator* - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - - * **Parameter name**: *from-port* - - * **Description**: *from-port* is a port ID inside the `body` sub-network to start mapping from. - * **Range of values**: the respective layer port index - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - - * **Parameter name**: *to-layer* - - * **Description**: *to-layer* is a layer ID inside the `body` sub-network to end mapping. 
- * **Range of values**: IDs of the layers inside the *TensorIterator* - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - - * **Parameter name**: *to-port* - - * **Description**: *to-port* is a port ID inside the `body` sub-network to end mapping. - * **Range of values**: the respective layer port index - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Node**: *body* is a sub-network that will be recurrently executed. - - * **Parameters**: The *body* node does not have parameters. - -**Example** - -```xml - - ... - ... - - - - ... - - ... - - - - ... - - - ... - ... - - -``` - -* * * - -## Tile Layer -Back to top - -**Name**: *Tile* - -**Category**: *Layer* - -**Short description**: *Tile* layer extends input blob with copies of data along a specified axis. - -**Detailed description**: [Reference](http://caffe.help/manual/layers/tile.html) - -**Parameters**: *Tile* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: *axis* is the index of an axis to tile. For example, *axis* equal to 3 means that the fourth axis is used for tiling. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *tiles* - - * **Description**: *tiles* is the size of the specified axis in the output blob. For example, *tiles* equal to 88 means that the output blob gets 88 copies of data from the specified axis. - * **Range of values**: a positive integer - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -**Mathematical Formulation** - -*Tile* extends input blobs and filling in output blobs by the following rules: -\f[ -out_i=input_i[inner\_dim*t] -\f] -\f[ -t \in \left ( 0, \quad tiles \right ) -\f] - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -**Example** - -```xml - - - ... - ... 
- -``` - -* * * - - -## TopK Layer -Back to top - -**Name**: *TopK* - -**Category**: *Layer* - -**Short description**: *TopK* layer computes indices and values of the *k* maximum/minimum values for each slice along the axis specified. - -**Parameters**: *TopK* layer parameters are specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *axis* - - * **Description**: Specifies the axis along which to search for k maximum/minimum values. - * **Range of values**: an integer. Negative value means counting dimension from the end. - * **Type**: `int` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *mode* - - * **Description**: *mode* specifies an operation to use for selecting the largest element of two. - * **Range of values**: `min`, `max` - * **Type**: `string` - * **Default value**: None - * **Required**: *yes* - -* **Parameter name**: *sort* - - * **Description**: *sort* specifies an order of output elements and/or indices. - * **Range of values**: `value`, `index`, `none` - * **Type**: `string` - * **Default value**: None - * **Required**: *yes* - - -**Inputs**: - -* **1**: Arbitrary tensor. Required. - -* **2**: *k* - scalar specifies how many maximum/minimum elements should be computed - -**Outputs**: - -* **1**: Output tensor with top *k* values from input tensor along specified dimension *axis*. The shape of the tensor is `[input1.shape[0], ..., input1.shape[axis-1], k, input1.shape[axis+1], ...]`. - -* **2**: Output tensor with top *k* indices for each slice along *axis* dimension. 
- The shape of the tensor is the same as for the 1st output, that is `[input1.shape[0], ..., input1.shape[axis-1], k, input1.shape[axis+1], ...]` - -**Mathematical Formulation** - -Output tensor is populated by values computes in the following way: - -\f[ -output[i1, ..., i(axis-1), j, i(axis+1) ..., iN] = top_k(input[i1, ...., i(axis-1), :, i(axis+1), ..., iN]), k, sort, mode) -\f] - -So for each slice `input[i1, ...., i(axis-1), :, i(axis+1), ..., iN]` which represents 1D array, top_k value is computed individually. Sorting and minimum/maximum are controlled by `sort` and `mode` attributes. - -**Example** - -```xml - - - - - 6 - 12 - 10 - 24 - - - - - - 6 - 3 - 10 - 24 - - - -``` - -* * * - -## Unique Layer -Back to top - -**Name**: *Unique* - -**Category**: *Layer* - -**Short description**: *Unique* finds unique elements in 1-D tensor. - -**Detailed description**: [Reference](https://pytorch.org/docs/stable/torch.html?highlight=unique#torch.unique) - -**Parameters**: *Unique* layer parameters should be specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *sorted* - - * **Description**: If *sorted* is equal to *true*, the unique elements in the output are sorted in ascending order. Otherwise, all of the unique elements are sorted in the same order as they occur in the input. - * **Range of values**: *true* or *false* - * **Type**: `string` - * **Required**: *yes* - -* **Parameter name**: *return_inverse* - - * **Description**: If *return_inverse* is equal to *true*, the layer outputs the indices. Otherwise, it does not. - * **Range of values**: *true* or *false* - * **Type**: `string` - * **Required**: *yes* - -* **Parameter name**: *return_counts* - - * **Description**: If *return_counts* is equal to *true*, the layer outputs the counts for each unique element. Otherwise, it does not. - * **Range of values**: *true* or *false* - * **Type**: `string` - * **Required**: *yes* - -**Input**: - -* **1**: 1-D tensor. 
Input tensor. Required. - -**Outputs**: - -* **1**: 1-D tensor. Tensor of all unique elements from the input tensor. As a number of unique elements can be less than a size of the input, the end of this tensor is marked with the latest unique element. Required. -* **2**: 1-D tensor. Tensor of indices of unique elements of the first output that can be used to reconstruct the input. The size of this tensor is equal to the input size. It outputs in the second output port. Optional. -* **3**: 1-D tensor. Tensor of counts of occurrences of each unique element in the input. It has the same size as the output with unique elements. The end of this tensor is marked with zero. It outputs in the second output port if return_inverse is *false*, otherwise, it outputs in the third output port. Optional. - -**Example** -```xml - - - - - 20 - - - - - 20 - - - 20 - - - -``` - -* * * - - - -## Unsqueeze Layer - -**Name**: *Unsqueeze* - -**Category**: *Layer* - -**Short description**: *Unsqueeze* adds dimensions of size 1 to the first input tensor. The second input value specifies a list of dimensions that will be inserted. Indices specify dimensions in the output tensor. - -**Parameters**: *Unsqueeze* layer doesn't have parameters. - -**Inputs**: - -* **1**: Multidimensional input blob. Required. - -* **2**: OD or 1D tensor with dimensions indices to be set to 1. Values could be negative. Indices could be integer or float values. - -**Example** - -*Example 1:* -```xml - - - - 2 - 3 - - - - - 2 - - - - - 1 - 2 - 3 - 1 - - - -``` - -*Example 2: (unsqueeze 0D tensor (constant) to 1D tensor)* -```xml - - - - - - - - 1 - - - - - 1 - - - -``` - -* * * - -## Unique Layer -Back to top - -**Name**: *Unique* - -**Category**: *Layer* - -**Short description**: *Unique* finds unique elements in 1-D tensor. 
- -**Detailed description**: [Reference](https://pytorch.org/docs/stable/torch.html?highlight=unique#torch.unique) - -**Parameters**: *Unique* layer parameters should be specified in the `data` node, which is a child of the `layer` node. - -* **Parameter name**: *sorted* - - * **Description**: If *sorted* is equal to *true*, the unique elements in the output are sorted in ascending order. Otherwise, all of the unique elements are sorted in the same order as they occur in the input. - * **Range of values**: *true* or *false* - * **Type**: `string` - * **Required**: *yes* - -* **Parameter name**: *return_inverse* - - * **Description**: If *return_inverse* is equal to *true*, the layer outputs the indices. Otherwise, it does not. - * **Range of values**: *true* or *false* - * **Type**: `string` - * **Required**: *yes* - -* **Parameter name**: *return_counts* - - * **Description**: If *return_counts* is equal to *true*, the layer outputs the counts for each unique element. Otherwise, it does not. - * **Range of values**: *true* or *false* - * **Type**: `string` - * **Required**: *yes* - -**Input**: - -* **1**: 1-D tensor. Input tensor. Required. - -**Outputs**: - -* **1**: 1-D tensor. Tensor of all unique elements from the input tensor. As a number of unique elements can be less than a size of the input, the end of this tensor is marked with the latest unique element. Required. -* **2**: 1-D tensor. Tensor of indices of unique elements of the first output that can be used to reconstruct the input. The size of this tensor is equal to the input size. It outputs in the second output port. Optional. -* **3**: 1-D tensor. Tensor of counts of occurrences of each unique element in the input. It has the same size as the output with unique elements. The end of this tensor is marked with zero. It outputs in the second output port if return_inverse is *false*, otherwise, it outputs in the third output port. Optional. 
- -**Example** -```xml - - - - - 20 - - - - - 20 - - - 20 - - - -``` - -* * * \ No newline at end of file diff --git a/docs/MO_DG/prepare_model/convert_model/kaldi_specific/Aspire_Tdnn_Model.md b/docs/MO_DG/prepare_model/convert_model/kaldi_specific/Aspire_Tdnn_Model.md index 3097d419705..73f5bc1306b 100644 --- a/docs/MO_DG/prepare_model/convert_model/kaldi_specific/Aspire_Tdnn_Model.md +++ b/docs/MO_DG/prepare_model/convert_model/kaldi_specific/Aspire_Tdnn_Model.md @@ -1,6 +1,6 @@ -# Convert Kaldi* ASpIRE Chain Time Delay Neural Network (TDNN) Model to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_kaldi_specific_Aspire_Tdnn_Model} +# Convert Kaldi* ASpIRE Chain Time Delay Neural Network (TDNN) Model {#openvino_docs_MO_DG_prepare_model_convert_model_kaldi_specific_Aspire_Tdnn_Model} -You can [download a pre-trained model](https://kaldi-asr.org/models/1/0001_aspire_chain_model.tar.gz) +You can [download a pre-trained model](https://kaldi-asr.org/models/1/0001_aspire_chain_model.tar.gz) for the ASpIRE Chain Time Delay Neural Network (TDNN) from the Kaldi* project official website. ## Convert ASpIRE Chain TDNN Model to IR @@ -10,15 +10,15 @@ To generate the Intermediate Representation (IR) of the model, run the Model Opt mo --input_model exp/chain/tdnn_7b/final.mdl --output output ``` -The IR will have two inputs: `input` for data and `ivector` for ivectors. +The IR will have two inputs: `input` for data and `ivector` for ivectors. ## Example: Run ASpIRE Chain TDNN Model with the Speech Recognition Sample These instructions show how to run the converted model with the [Speech Recognition sample](../../../../../samples/cpp/speech_sample/README.md). -In this example, the input data contains one utterance from one speaker. +In this example, the input data contains one utterance from one speaker. -To follow the steps described below, you must first do the following: -1. Download a [Kaldi repository](https://github.com/kaldi-asr/kaldi). 
+To follow the steps described below, you must first do the following: +1. Download a [Kaldi repository](https://github.com/kaldi-asr/kaldi). 2. Build it using instructions in `README.md` in the repository. 3. Download the [model archive](https://kaldi-asr.org/models/1/0001_aspire_chain_model.tar.gz) from Kaldi website. 4. Extract the downloaded model archive to the `egs/aspire/s5` folder of the Kaldi repository. @@ -49,10 +49,10 @@ cd /egs/aspire/s5/ 2. Extract ivectors from the data: ```sh -./steps/online/nnet2/extract_ivectors_online.sh --nj 1 --ivector_period exp/tdnn_7b_chain_online/ivector_extractor +./steps/online/nnet2/extract_ivectors_online.sh --nj 1 --ivector_period exp/tdnn_7b_chain_online/ivector_extractor ``` -To simplify the preparation of ivectors for the Speech Recognition sample, -specify the maximum number of frames in utterances as a parameter for `--ivector_period` +To simplify the preparation of ivectors for the Speech Recognition sample, +specify the maximum number of frames in utterances as a parameter for `--ivector_period` to get only one ivector per utterance. To get the maximum number of frames in utterances, you can use the following command line: @@ -71,7 +71,7 @@ cd /src/featbin/copy-feats --binary=False ark:ivector_online.1.ark ark,t:ivector_online.1.ark.txt ``` -5. For the Speech Recognition sample, the `.ark` file must contain an ivector +5. For the Speech Recognition sample, the `.ark` file must contain an ivector for each frame. You must copy the ivector `frame_count` times. 
To do this, you can run the following script in the Python* command prompt: ```python @@ -108,5 +108,5 @@ Run the Speech Recognition sample with the created ivector `.ark` file as follow speech_sample -i feats.ark,ivector_online_ie.ark -m final.xml -d CPU -o prediction.ark -cw_l 17 -cw_r 12 ``` -Results can be decoded as described in "Use of Sample in Kaldi* Speech Recognition Pipeline" chapter +Results can be decoded as described in "Use of Sample in Kaldi* Speech Recognition Pipeline" chapter in [the Speech Recognition Sample description](../../../../../samples/cpp/speech_sample/README.md). diff --git a/docs/MO_DG/prepare_model/convert_model/mxnet_specific/Convert_GluonCV_Models.md b/docs/MO_DG/prepare_model/convert_model/mxnet_specific/Convert_GluonCV_Models.md index 5f4897093e4..e9ca88fbec5 100644 --- a/docs/MO_DG/prepare_model/convert_model/mxnet_specific/Convert_GluonCV_Models.md +++ b/docs/MO_DG/prepare_model/convert_model/mxnet_specific/Convert_GluonCV_Models.md @@ -1,15 +1,15 @@ -# Converting GluonCV* Models {#openvino_docs_MO_DG_prepare_model_convert_model_mxnet_specific_Convert_GluonCV_Models} +# Convert MXNet GluonCV* Models {#openvino_docs_MO_DG_prepare_model_convert_model_mxnet_specific_Convert_GluonCV_Models} This document provides the instructions and examples on how to use Model Optimizer to convert [GluonCV SSD and YOLO-v3 models](https://gluon-cv.mxnet.io/model_zoo/detection.html) to IR. -1. Choose the topology available from the [GluonCV Model Zoo](https://gluon-cv.mxnet.io/model_zoo/detection.html) and export to the MXNet format using the GluonCV API. For example, for the `ssd_512_mobilenet1.0` topology: +1. Choose the topology available from the [GluonCV Model Zoo](https://gluon-cv.mxnet.io/model_zoo/detection.html) and export to the MXNet format using the GluonCV API. 
For example, for the `ssd_512_mobilenet1.0` topology: ```python from gluoncv import model_zoo, data, utils from gluoncv.utils import export_block net = model_zoo.get_model('ssd_512_mobilenet1.0_voc', pretrained=True) export_block('ssd_512_mobilenet1.0_voc', net, preprocess=True, layout='HWC') ``` -As a result, you will get an MXNet model representation in `ssd_512_mobilenet1.0.params` and `ssd_512_mobilenet1.0.json` files generated in the current directory. +As a result, you will get an MXNet model representation in `ssd_512_mobilenet1.0.params` and `ssd_512_mobilenet1.0.json` files generated in the current directory. 2. Run the Model Optimizer tool specifying the `--enable_ssd_gluoncv` option. Make sure the `--input_shape` parameter is set to the input shape layout of your model (NHWC or NCHW). The examples below illustrates running the Model Optimizer for the SSD and YOLO-v3 models trained with the NHWC layout and located in the ``: * **For GluonCV SSD topologies:** ```sh diff --git a/docs/MO_DG/prepare_model/convert_model/mxnet_specific/Convert_Style_Transfer_From_MXNet.md b/docs/MO_DG/prepare_model/convert_model/mxnet_specific/Convert_Style_Transfer_From_MXNet.md index 86a4990ee58..c204700bd2f 100644 --- a/docs/MO_DG/prepare_model/convert_model/mxnet_specific/Convert_Style_Transfer_From_MXNet.md +++ b/docs/MO_DG/prepare_model/convert_model/mxnet_specific/Convert_Style_Transfer_From_MXNet.md @@ -1,4 +1,4 @@ -# Converting a Style Transfer Model from MXNet* {#openvino_docs_MO_DG_prepare_model_convert_model_mxnet_specific_Convert_Style_Transfer_From_MXNet} +# Convert MXNet Style Transfer Model {#openvino_docs_MO_DG_prepare_model_convert_model_mxnet_specific_Convert_Style_Transfer_From_MXNet} The tutorial explains how to generate a model for style transfer using the public MXNet\* neural style transfer sample. To use the style transfer sample from OpenVINO™, follow the steps below as no public pre-trained style transfer model is provided with the OpenVINO toolkit. 
@@ -86,8 +86,8 @@ import make_image maker = make_image.Maker('models/13', (1024, 768)) maker.generate('output.jpg', '../images/tubingen.jpg') ``` -Where the `models/13` string is composed of the following substrings: -* `models/`: path to the folder that contains .nd files with pre-trained styles weights +Where the `models/13` string is composed of the following substrings: +* `models/`: path to the folder that contains .nd files with pre-trained styles weights * `13`: prefix pointing to 13_decoder, which is the default decoder for the repository. > **NOTE**: If you get an error saying "No module named 'cPickle'", try running the script from this step in Python 2. Then return to Python 3 for the remaining steps. @@ -114,4 +114,4 @@ cp models/13_decoder_auxs.nd nst_model ```sh mo --input_symbol /nst_vgg19-symbol.json --framework mxnet --output_dir --input_shape [1,3,224,224] --nd_prefix_name 13_decoder --pretrained_model /vgg19-0000.params ``` -4. The IR is generated (`.bin`, `.xml` and `.mapping` files) in the specified output directory and ready to be consumed by the Inference Engine. +4. The IR is generated (`.bin`, `.xml` and `.mapping` files) in the specified output directory and ready to be consumed by the OpenVINO Runtime. diff --git a/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_DLRM.md b/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_DLRM.md deleted file mode 100644 index 8e87467a528..00000000000 --- a/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_DLRM.md +++ /dev/null @@ -1,32 +0,0 @@ -[DEPRECATED] Convert ONNX* DLRM to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_DLRM} -=============================== - -> **NOTE**: These instructions are currently deprecated. Since OpenVINO™ 2020.4 version, no specific steps are needed to convert ONNX\* DLRM models. 
For general instructions on converting ONNX models, please refer to [Converting a ONNX* Model](../Convert_Model_From_ONNX.md) topic. - -These instructions are applicable only to the DLRM converted to the ONNX* file format from the [facebookresearch/dlrm model](https://github.com/facebookresearch/dlrm). - -**Step 1**. Save trained Pytorch* model to ONNX* format or download pretrained ONNX* from -[MLCommons/inference/recommendation/dlrm](https://github.com/mlcommons/inference/tree/r1.0/recommendation/dlrm/pytorch#supported-models) repository. -If you train the model using the [script provided in model repository](https://github.com/facebookresearch/dlrm/blob/master/dlrm_s_pytorch.py), just add the `--save-onnx` flag to the command line parameters and you'll get the `dlrm_s_pytorch.onnx` file containing the model serialized in ONNX* format. - -**Step 2**. To generate the Intermediate Representation (IR) of the model, change your current working directory to the Model Optimizer installation directory and run the Model Optimizer with the following parameters: -```sh -mo --input_model dlrm_s_pytorch.onnx -``` - -Note that Pytorch model uses operation `torch.nn.EmbeddingBag`. This operation converts to onnx as custom `ATen` layer and not directly supported by OpenVINO*, but it is possible to convert this operation to: -* `Gather` if each "bag" consists of exactly one index. In this case `offsets` input becomes obsolete and not needed. They will be removed during conversion. -* `ExperimentalSparseWeightedSum` if "bags" contain not just one index. In this case Model Optimizer will print warning that pre-process of offsets is needed, because `ExperimentalSparseWeightedSum` and `torch.nn.EmbeddingBag` have different format of inputs. -For example if you have `indices` input of shape [indices_shape] and `offsets` input of shape [num_bags] you need to get offsets of shape [indices_shape, 2]. 
To do that you may use the following code snippet: -```python -import numpy as np - -new_offsets = np.zeros((indices.shape[-1], 2), dtype=np.int32) -new_offsets[:, 1] = np.arange(indices.shape[-1]) -bag_index = 0 -for i in range(offsets.shape[-1] - 1): - new_offsets[offsets[i]:offsets[i + 1], 0] = bag_index - bag_index += 1 -new_offsets[offsets[-1]:, 0] = bag_index -``` -If you have more than one `torch.nn.EmbeddingBag` operation you'll need to do that for every offset input. If your offsets have same shape they will be merged into one input of shape [num_embedding_bags, indices_shape, 2]. diff --git a/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_Faster_RCNN.md b/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_Faster_RCNN.md index 422a1028ce0..7054613af32 100644 --- a/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_Faster_RCNN.md +++ b/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_Faster_RCNN.md @@ -1,4 +1,4 @@ -# Convert ONNX* Faster R-CNN Model to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_Faster_RCNN} +# Convert ONNX* Faster R-CNN Model {#openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_Faster_RCNN} These instructions are applicable only to the Faster R-CNN model converted to the ONNX* file format from the [facebookresearch/maskrcnn-benchmark model](https://github.com/facebookresearch/maskrcnn-benchmark). @@ -11,7 +11,7 @@ These instructions are applicable only to the Faster R-CNN model converted to th --input_shape [1,3,800,800] \ --input 0:2 \ --mean_values [102.9801,115.9465,122.7717] \ ---transformations_config front/onnx/faster_rcnn.json +--transformations_config front/onnx/faster_rcnn.json ``` Note that the height and width specified with the `input_shape` command line parameter could be different. 
Refer to the [documentation](https://github.com/onnx/models/tree/master/vision/object_detection_segmentation/faster-rcnn) for more information about supported input image dimensions and required pre- and post-processing steps. diff --git a/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_GPT2.md b/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_GPT2.md index 2e7af9bf042..92374d3f1e6 100644 --- a/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_GPT2.md +++ b/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_GPT2.md @@ -1,4 +1,4 @@ -# Convert ONNX* GPT-2 Model to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_GPT2} +# Convert ONNX* GPT-2 Model {#openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_GPT2} [Public pre-trained GPT-2 model](https://github.com/onnx/models/tree/master/text/machine_comprehension/gpt-2) is a large transformer-based language model with a simple objective: predict the next word, given all of the previous words within some text. diff --git a/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_Mask_RCNN.md b/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_Mask_RCNN.md index fb613bacf4a..6f641625092 100644 --- a/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_Mask_RCNN.md +++ b/docs/MO_DG/prepare_model/convert_model/onnx_specific/Convert_Mask_RCNN.md @@ -1,4 +1,4 @@ -# Convert ONNX* Mask R-CNN Model to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_Mask_RCNN} +# Convert ONNX* Mask R-CNN Model {#openvino_docs_MO_DG_prepare_model_convert_model_onnx_specific_Convert_Mask_RCNN} These instructions are applicable only to the Mask R-CNN model converted to the ONNX* file format from the [facebookresearch/maskrcnn-benchmark model](https://github.com/facebookresearch/maskrcnn-benchmark). 
@@ -11,7 +11,7 @@ These instructions are applicable only to the Mask R-CNN model converted to the --input "0:2" \ --input_shape [1,3,800,800] \ --mean_values [102.9801,115.9465,122.7717] \ ---transformations_config front/onnx/mask_rcnn.json +--transformations_config front/onnx/mask_rcnn.json ``` Note that the height and width specified with the `input_shape` command line parameter could be different. Refer to the [documentation](https://github.com/onnx/models/tree/master/vision/object_detection_segmentation/mask-rcnn) for more information about supported input image dimensions and required pre- and post-processing steps. diff --git a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_Bert_ner.md b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_Bert_ner.md index 2bd66bd1228..0769ca1759d 100644 --- a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_Bert_ner.md +++ b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_Bert_ner.md @@ -1,4 +1,4 @@ -# Convert PyTorch* BERT-NER to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_Bert_ner} +# Convert PyTorch* BERT-NER Model {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_Bert_ner} ## Download and Convert the Model to ONNX* diff --git a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_Cascade_RCNN_res101.md b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_Cascade_RCNN_res101.md new file mode 100644 index 00000000000..b65e9f51ecc --- /dev/null +++ b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_Cascade_RCNN_res101.md @@ -0,0 +1,28 @@ +# Convert PyTorch Cascade RCNN R-101 Model {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_Cascade_RCNN_res101} + +## Download and Convert Model to ONNX + +* Clone the [repository](https://github.com/open-mmlab/mmdetection): + +```bash +git clone 
https://github.com/open-mmlab/mmdetection +cd mmdetection +``` + +> **NOTE**: To set up an environment, refer to this [instruction](https://github.com/open-mmlab/mmdetection/blob/master/docs/en/get_started.md#installation). + +* Download the pre-trained [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth). You can also find the link to the model [here](https://github.com/open-mmlab/mmdetection/blob/master/configs/cascade_rcnn/README.md). + +* To convert the model to ONNX format, use this [script](https://github.com/open-mmlab/mmdetection/blob/master/tools/deployment/pytorch2onnx.py). + +```bash +python3 tools/deployment/pytorch2onnx.py configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth --output-file cascade_rcnn_r101_fpn_1x_coco.onnx +``` + +The script generates ONNX model file `cascade_rcnn_r101_fpn_1x_coco.onnx` in the directory `tools/deployment/`. 
If required, you can specify the model name or output directory using `--output-file <path-to-output-dir>/<model-name>.onnx`. + +## Convert ONNX Cascade RCNN R-101 Model to IR + +```bash +mo --input_model cascade_rcnn_r101_fpn_1x_coco.onnx --mean_values [123.675,116.28,103.53] --scale_values [58.395,57.12,57.375] +``` \ No newline at end of file diff --git a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_F3Net.md b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_F3Net.md index d26b712d7fa..eef87a8e743 100644 --- a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_F3Net.md +++ b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_F3Net.md @@ -1,4 +1,4 @@ -# Convert PyTorch* F3Net to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_F3Net} +# Convert PyTorch* F3Net Model {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_F3Net} [F3Net](https://github.com/weijun88/F3Net): Fusion, Feedback and Focus for Salient Object Detection @@ -7,12 +7,12 @@ To clone the repository, run the following command: ```sh -git clone http://github.com/weijun88/F3Net.git +git clone http://github.com/weijun88/F3Net.git ``` ## Download and Convert the Model to ONNX* -To download the pre-trained model or train the model yourself, refer to the +To download the pre-trained model or train the model yourself, refer to the [instruction](https://github.com/weijun88/F3Net/blob/master/README.md) in the F3Net model repository. First, convert the model to ONNX\* format. 
Create and run the following Python script in the `src` directory of the model repository: ```python import torch diff --git a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_QuartzNet.md b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_QuartzNet.md index 560ef2e03be..a954dcd9173 100644 --- a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_QuartzNet.md +++ b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_QuartzNet.md @@ -1,4 +1,4 @@ -# Convert PyTorch* QuartzNet to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_QuartzNet} +# Convert PyTorch* QuartzNet Model {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_QuartzNet} [NeMo project](https://github.com/NVIDIA/NeMo) provides the QuartzNet model. diff --git a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_RCAN.md b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_RCAN.md index 26d6f173971..687ae4f90eb 100644 --- a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_RCAN.md +++ b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_RCAN.md @@ -1,4 +1,4 @@ -# Convert PyTorch* RCAN to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_RCAN} +# Convert PyTorch* RCAN Model {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_RCAN} [RCAN](https://github.com/yulunzhang/RCAN): Image Super-Resolution Using Very Deep Residual Channel Attention Networks diff --git a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_RNNT.md b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_RNNT.md index ecaf315295a..4784fe0f791 100644 --- a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_RNNT.md +++ b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_RNNT.md @@ -1,22 +1,22 @@ -# Convert PyTorch\* RNN-T Model 
to the Intermediate Representation (IR) {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_RNNT} +# Convert PyTorch* RNN-T Model {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_RNNT} -This instruction covers conversion of RNN-T model from [MLCommons](https://github.com/mlcommons) repository. Follow +This instruction covers conversion of RNN-T model from [MLCommons](https://github.com/mlcommons) repository. Follow the steps below to export a PyTorch* model into ONNX* before converting it to IR: -**Step 1**. Clone RNN-T PyTorch implementation from MLCommons repository (revision r1.0). Make a shallow clone to pull +**Step 1**. Clone RNN-T PyTorch implementation from MLCommons repository (revision r1.0). Make a shallow clone to pull only RNN-T model without full repository. If you already have a full repository, skip this and go to **Step 2**: ```bash git clone -b r1.0 -n https://github.com/mlcommons/inference rnnt_for_openvino --depth 1 cd rnnt_for_openvino -git checkout HEAD speech_recognition/rnnt +git checkout HEAD speech_recognition/rnnt ``` -**Step 2**. If you already have a full clone of MLCommons inference repository, create a folder for -pretrained PyTorch model, where conversion into IR will take place. You will also need to specify the path to +**Step 2**. If you already have a full clone of MLCommons inference repository, create a folder for +pretrained PyTorch model, where conversion into IR will take place. You will also need to specify the path to your full clone at **Step 5**. Skip this step if you have a shallow clone. ```bash -mkdir rnnt_for_openvino +mkdir rnnt_for_openvino cd rnnt_for_openvino ``` @@ -25,7 +25,7 @@ For UNIX*-like systems you can use `wget`: ```bash wget https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt ``` -The link was taken from `setup.sh` in the `speech_recoginitin/rnnt` subfolder. 
You will get exactly the same weights as +The link was taken from `setup.sh` in the `speech_recognition/rnnt` subfolder. You will get exactly the same weights as if you were following the steps from [https://github.com/mlcommons/inference/tree/master/speech_recognition/rnnt](https://github.com/mlcommons/inference/tree/master/speech_recognition/rnnt). **Step 4**. Install required Python packages: @@ -33,7 +33,7 @@ if you were following the steps from [https://github.com/mlcommons/inference/tre pip3 install torch toml ``` -**Step 5**. Export RNN-T model into ONNX with the script below. Copy the code below into a file named +**Step 5**. Export RNN-T model into ONNX with the script below. Copy the code below into a file named `export_rnnt_to_onnx.py` and run it in the current directory `rnnt_for_openvino`: > **NOTE**: If you already have a full clone of MLCommons inference repository, you need to @@ -94,7 +94,7 @@ torch.onnx.export(model.joint, (f, g), "rnnt_joint.onnx", opset_version=12, python3 export_rnnt_to_onnx.py ``` -After completing this step, the files `rnnt_encoder.onnx`, `rnnt_prediction.onnx`, and `rnnt_joint.onnx` will be saved in the current directory. +After completing this step, the files `rnnt_encoder.onnx`, `rnnt_prediction.onnx`, and `rnnt_joint.onnx` will be saved in the current directory. **Step 6**. 
Run the conversion commands: @@ -103,6 +103,6 @@ mo --input_model rnnt_encoder.onnx --input "input[157 1 240],feature_length->157 mo --input_model rnnt_prediction.onnx --input "symbol[1 1],hidden_in_1[2 1 320],hidden_in_2[2 1 320]" mo --input_model rnnt_joint.onnx --input "0[1 1 1024],1[1 1 320]" ``` -Please note that hardcoded value for sequence length = 157 was taken from the MLCommons but conversion to IR preserves -network [reshapeability](../../../../OV_Runtime_UG/ShapeInference.md), this means you can change input shapes manually to any value either during conversion or +Please note that hardcoded value for sequence length = 157 was taken from the MLCommons but conversion to IR preserves +network [reshapeability](../../../../OV_Runtime_UG/ShapeInference.md), this means you can change input shapes manually to any value either during conversion or inference. diff --git a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_YOLACT.md b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_YOLACT.md index aa68f9d6185..fe6364280ae 100644 --- a/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_YOLACT.md +++ b/docs/MO_DG/prepare_model/convert_model/pytorch_specific/Convert_YOLACT.md @@ -1,4 +1,4 @@ -# Convert PyTorch* YOLACT to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_YOLACT} +# Convert PyTorch* YOLACT Model {#openvino_docs_MO_DG_prepare_model_convert_model_pytorch_specific_Convert_YOLACT} You Only Look At CoefficienTs (YOLACT) is a simple, fully convolutional model for real-time instance segmentation. The PyTorch\* implementation is publicly available in [this GitHub* repository](https://github.com/dbolya/yolact). 
@@ -29,7 +29,7 @@ index 547bc0a..bde0680 100644 +++ b/eval.py @@ -593,9 +593,12 @@ def badhash(x): return x - + def evalimage(net:Yolact, path:str, save_path:str=None): - frame = torch.from_numpy(cv2.imread(path)).cuda().float() + frame = torch.from_numpy(cv2.imread(path)).float() @@ -38,9 +38,9 @@ index 547bc0a..bde0680 100644 batch = FastBaseTransform()(frame.unsqueeze(0)) preds = net(batch) + torch.onnx.export(net, batch, "yolact.onnx", opset_version=11) - + img_numpy = prep_display(preds, frame, None, None, undo_transform=False) - + diff --git a/utils/augmentations.py b/utils/augmentations.py index cc7a73a..2420603 100644 --- a/utils/augmentations.py @@ -48,7 +48,7 @@ index cc7a73a..2420603 100644 @@ -623,8 +623,11 @@ class FastBaseTransform(torch.nn.Module): def __init__(self): super().__init__() - + - self.mean = torch.Tensor(MEANS).float().cuda()[None, :, None, None] - self.std = torch.Tensor( STD ).float().cuda()[None, :, None, None] + self.mean = torch.Tensor(MEANS).float()[None, :, None, None] @@ -57,7 +57,7 @@ index cc7a73a..2420603 100644 + self.mean.cuda() + self.std.cuda() self.transform = cfg.backbone.transform - + def forward(self, img): diff --git a/yolact.py b/yolact.py index d83703b..f8c787c 100644 @@ -66,7 +66,7 @@ index d83703b..f8c787c 100644 @@ -17,19 +17,22 @@ import torch.backends.cudnn as cudnn from utils import timer from utils.functions import MovingAverage, make_net - + -# This is required for Pytorch 1.0.1 on Windows to initialize Cuda on some driver versions. -# See the bug report here: https://github.com/pytorch/pytorch/issues/17108 -torch.cuda.current_device() @@ -76,26 +76,26 @@ index d83703b..f8c787c 100644 -if not use_jit: - print('Multiple GPUs detected! 
Turning off JIT.') +use_jit = False - + ScriptModuleWrapper = torch.jit.ScriptModule if use_jit else nn.Module script_method_wrapper = torch.jit.script_method if use_jit else lambda fn, _rcn=None: fn - - + + +def decode(loc, priors): + variances = [0.1, 0.2] + boxes = torch.cat((priors[:, :2] + loc[:, :, :2] * variances[0] * priors[:, 2:], priors[:, 2:] * torch.exp(loc[:, :, 2:] * variances[1])), 2) + + boxes_result1 = boxes[:, :, :2] - boxes[:, :, 2:] / 2 -+ boxes_result2 = boxes[:, :, 2:] + boxes[:, :, :2] ++ boxes_result2 = boxes[:, :, 2:] + boxes_result1 + boxes_result = torch.cat((boxes_result1, boxes_result2), 2) + + return boxes_result + - + class Concat(nn.Module): def __init__(self, nets, extra_params): @@ -476,7 +479,10 @@ class Yolact(nn.Module): - + def load_weights(self, path): """ Loads weights from a compressed save file. """ - state_dict = torch.load(path) @@ -103,23 +103,23 @@ index d83703b..f8c787c 100644 + state_dict = torch.load(path) + else: + state_dict = torch.load(path, map_location=torch.device('cpu')) - + # For backward compatability, remove these (the new variable is called layers) for key in list(state_dict.keys()): @@ -673,8 +679,11 @@ class Yolact(nn.Module): else: pred_outs['conf'] = F.softmax(pred_outs['conf'], -1) - + - return self.detect(pred_outs, self) + pred_outs['boxes'] = decode(pred_outs['loc'], pred_outs['priors']) # decode output boxes - + + pred_outs.pop('priors') # remove unused in postprocessing layers + pred_outs.pop('loc') # remove unused in postprocessing layers + return pred_outs - - - --- + + + +-- ``` 3. Save and close the file. 
diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_AttentionOCR_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_AttentionOCR_From_Tensorflow.md index 9530ad1b7bc..e87e0fd3786 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_AttentionOCR_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_AttentionOCR_From_Tensorflow.md @@ -1,4 +1,4 @@ -# Convert TensorFlow* Attention OCR Model to Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_AttentionOCR_From_Tensorflow} +# Convert TensorFlow Attention OCR Model {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_AttentionOCR_From_Tensorflow} This tutorial explains how to convert the Attention OCR (AOCR) model from the [TensorFlow* Attention OCR repository](https://github.com/emedvedev/attention-ocr) to the Intermediate Representation (IR). @@ -20,7 +20,7 @@ The original AOCR model contains data preprocessing which consists of the following: * Decoding input data to binary format where input data is an image represented as a string. * Resizing binary image to working resolution. -After that, the resized image is sent to the convolution neural network (CNN). The Model Optimizer does not support image decoding so you should cut of preprocessing part of the model using '--input' command line parameter. +After that, the resized image is sent to the convolution neural network (CNN). The Model Optimizer does not support image decoding so you should cut off the preprocessing part of the model using the '--input' command line parameter. 
```sh mo \ --input_model=model/path/frozen_graph.pb \ diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_BERT_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_BERT_From_Tensorflow.md index 6af7bef609e..5b8500ceaab 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_BERT_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_BERT_From_Tensorflow.md @@ -1,4 +1,4 @@ -# Convert TensorFlow* BERT Model to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_BERT_From_Tensorflow} +# Convert TensorFlow BERT Model {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_BERT_From_Tensorflow} Pre-trained models for BERT (Bidirectional Encoder Representations from Transformers) are [publicly available](https://github.com/google-research/bert). @@ -112,7 +112,7 @@ Run the Model Optimizer with the following command line parameters to generate r ```sh mo \ --input_model inference_graph.pb \ ---input "IteratorGetNext:0{i32}[1 128],IteratorGetNext:1{i32}[1 128],IteratorGetNext:4{i32}[1 128]" +--input "IteratorGetNext:0{i32}[1 128],IteratorGetNext:1{i32}[1 128],IteratorGetNext:4{i32}[1 128]" ``` For other applicable parameters, refer to [Convert Model from TensorFlow](../Convert_Model_From_TensorFlow.md). 
diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_CRNN_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_CRNN_From_Tensorflow.md index afd098933d9..2da4caa298f 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_CRNN_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_CRNN_From_Tensorflow.md @@ -1,4 +1,4 @@ -# Convert CRNN* Models to the Intermediate Representation (IR) {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_CRNN_From_Tensorflow} +# Convert TensorFlow CRNN Model {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_CRNN_From_Tensorflow} This tutorial explains how to convert a CRNN model to Intermediate Representation (IR). diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_DeepSpeech_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_DeepSpeech_From_Tensorflow.md index ad0ffbbb9fd..d5d631eaee6 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_DeepSpeech_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_DeepSpeech_From_Tensorflow.md @@ -1,4 +1,4 @@ -# Convert TensorFlow* DeepSpeech Model to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_DeepSpeech_From_Tensorflow} +# Convert TensorFlow DeepSpeech Model {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_DeepSpeech_From_Tensorflow} [DeepSpeech project](https://github.com/mozilla/DeepSpeech) provides an engine to train speech-to-text models. @@ -9,7 +9,7 @@ Create a directory where model and metagraph with pretrained weights will be sto mkdir deepspeech cd deepspeech ``` -[Pretrained English speech-to-text model](https://github.com/mozilla/DeepSpeech/releases/tag/v0.8.2) is publicly available. 
+[Pretrained English speech-to-text model](https://github.com/mozilla/DeepSpeech/releases/tag/v0.8.2) is publicly available. To download the model, follow the instruction below: * For UNIX*-like systems, run the following command: @@ -24,7 +24,7 @@ wget -O - https://github.com/mozilla/DeepSpeech/releases/download/v0.8.2/deepspe ## Freeze the Model into a *.pb File -After unpacking the archives above, you have to freeze the model. Note that this requires +After unpacking the archives above, you have to freeze the model. Note that this requires TensorFlow* version 1 which is not available under Python 3.8, so you need Python 3.7 or lower. Before freezing, deploy a virtual environment and install the required packages: ``` @@ -37,29 +37,29 @@ Freeze the model with the following command: ``` python3 DeepSpeech.py --checkpoint_dir ../deepspeech-0.8.2-checkpoint --export_dir ../ ``` -After that, you will get the pretrained frozen model file `output_graph.pb` in the directory `deepspeech` created at -the beginning. The model contains the preprocessing and main parts. The first preprocessing part performs conversion of input -spectrogram into a form useful for speech recognition (mel). This part of the model is not convertible into +After that, you will get the pretrained frozen model file `output_graph.pb` in the directory `deepspeech` created at +the beginning. The model contains the preprocessing and main parts. The first preprocessing part performs conversion of input +spectrogram into a form useful for speech recognition (mel). This part of the model is not convertible into IR because it contains unsupported operations `AudioSpectrogram` and `Mfcc`. -The main and most computationally expensive part of the model converts the preprocessed audio into text. -There are two specificities with the supported part of the model. +The main and most computationally expensive part of the model converts the preprocessed audio into text. 
+There are two specificities with the supported part of the model. -The first is that the model contains an input with sequence length. So the model can be converted with -a fixed input length shape, thus the model is not reshapeable. +The first is that the model contains an input with sequence length. So the model can be converted with +a fixed input length shape, thus the model is not reshapeable. Refer to the [Using Shape Inference](../../../../OV_Runtime_UG/ShapeInference.md). -The second is that the frozen model still has two variables: `previous_state_c` and `previous_state_h`, figure -with the frozen *.pb model is below. It means that the model keeps training these variables at each inference. +The second is that the frozen model still has two variables: `previous_state_c` and `previous_state_h`, figure +with the frozen *.pb model is below. It means that the model keeps training these variables at each inference. ![DeepSpeech model view](../../../img/DeepSpeech-0.8.2.png) -At the first inference the variables are initialized with zero tensors. After executing, the results of the `BlockLSTM` +At the first inference the variables are initialized with zero tensors. After executing, the results of the `BlockLSTM` are assigned to cell state and hidden state, which are these two variables. ## Convert the Main Part of DeepSpeech Model into IR -Model Optimizer assumes that the output model is for inference only. That is why you should cut `previous_state_c` +Model Optimizer assumes that the output model is for inference only. That is why you should cut `previous_state_c` and `previous_state_h` variables off and resolve keeping cell and hidden states on the application level. 
There are certain limitations for the model conversion: @@ -75,7 +75,7 @@ mo \ ``` Where: -* `input_lengths->[16]` Replaces the input node with name "input_lengths" with a constant tensor of shape [1] with a +* `input_lengths->[16]` Replaces the input node with name "input_lengths" with a constant tensor of shape [1] with a single integer value 16. This means that the model now can consume input sequences of length 16 only. * `input_node[1 16 19 26],previous_state_h[1 2048],previous_state_c[1 2048]` replaces the variables with a placeholder. * `--output ".../GatherNd_1,.../GatherNd,logits" ` output node names. diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_EfficientDet_Models.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_EfficientDet_Models.md index 4ec00886f59..a0e6c2b6cf9 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_EfficientDet_Models.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_EfficientDet_Models.md @@ -1,11 +1,11 @@ -# Converting EfficientDet Models from TensorFlow {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_EfficientDet_Models} +# Convert TensorFlow EfficientDet Models {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_EfficientDet_Models} -This tutorial explains how to convert EfficientDet\* public object detection models to the Intermediate Representation (IR). +This tutorial explains how to convert EfficientDet\* public object detection models to the Intermediate Representation (IR). ## Convert EfficientDet Model to IR -On GitHub*, you can find several public versions of EfficientDet model implementation. This tutorial explains how to -convert models from the [https://github.com/google/automl/tree/master/efficientdet](https://github.com/google/automl/tree/master/efficientdet) +On GitHub*, you can find several public versions of EfficientDet model implementation. 
This tutorial explains how to +convert models from the [https://github.com/google/automl/tree/master/efficientdet](https://github.com/google/automl/tree/master/efficientdet) repository (commit 96e1fee) to IR. ### Get Frozen TensorFlow\* Model @@ -60,15 +60,15 @@ dictionary in the [hparams_config.py](https://github.com/google/automl/blob/96e1 The attribute `image_size` specifies the shape to be specified for the model conversion. The `transformations_config` command line parameter specifies the configuration json file containing hints -to the Model Optimizer on how to convert the model and trigger transformations implemented in the +to the Model Optimizer on how to convert the model and trigger transformations implemented in the `/openvino/tools/mo/front/tf/AutomlEfficientDet.py`. The json file contains some parameters which must be changed if you train the model yourself and modified the `hparams_config` file or the parameters are different from the ones used for EfficientDet-D4. The attribute names are self-explanatory or match the name in the `hparams_config` file. > **NOTE**: The color channel order (RGB or BGR) of an input data should match the channel order of the model training dataset. If they are different, perform the `RGB<->BGR` conversion specifying the command-line parameter: `--reverse_input_channels`. Otherwise, inference results may be incorrect. For more information about the parameter, refer to **When to Reverse Input Channels** section of [Converting a Model to Intermediate Representation (IR)](../Converting_Model.md). -OpenVINO™ toolkit provides samples that can be used to infer EfficientDet model. For more information, refer to -[Open Model Zoo Demos](@ref omz_demos) and +OpenVINO™ toolkit provides samples that can be used to infer EfficientDet model. 
For more information, refer to +[Open Model Zoo Demos](@ref omz_demos). ## Interpreting Results of the TensorFlow Model and the IR @@ -90,9 +90,4 @@ The output of the IR is a list of 7-element tuples: `[image_id, class_id, confid * `x_max` -- normalized `x` coordinate of the upper right corner of the detected object. * `y_max` -- normalized `y` coordinate of the upper right corner of the detected object. -The first element with `image_id = -1` means end of data. - ---- -## See Also - -* [Sub-Graph Replacement in Model Optimizer](../../customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md) +The first element with `image_id = -1` means end of data. \ No newline at end of file diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_FaceNet_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_FaceNet_From_Tensorflow.md index 6f2ec6595c5..931b1d9aa0a 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_FaceNet_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_FaceNet_From_Tensorflow.md @@ -1,4 +1,4 @@ -# Convert TensorFlow* FaceNet Models to Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_FaceNet_From_Tensorflow} +# Convert TensorFlow FaceNet Models {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_FaceNet_From_Tensorflow} [Public pre-trained FaceNet models](https://github.com/davidsandberg/facenet#pre-trained-models) contain both training and inference part of graph. Switch between this two states is manageable with placeholder value. 
diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_GNMT_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_GNMT_From_Tensorflow.md index 236dd4aac90..a50f2dcacce 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_GNMT_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_GNMT_From_Tensorflow.md @@ -1,4 +1,4 @@ -# Convert GNMT* Model to the Intermediate Representation (IR) {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_GNMT_From_Tensorflow} +# Convert TensorFlow GNMT Model {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_GNMT_From_Tensorflow} This tutorial explains how to convert Google\* Neural Machine Translation (GNMT) model to the Intermediate Representation (IR). @@ -17,20 +17,20 @@ index 2cbef07..e185490 100644 +++ b/nmt/inference.py @@ -17,9 +17,11 @@ from __future__ import print_function - + import codecs +import os import time - + import tensorflow as tf +from tensorflow.python.framework import graph_io - + from . import attention_model from . 
import gnmt_model @@ -105,6 +107,29 @@ def start_sess_and_load_model(infer_model, ckpt_path): return sess, loaded_infer_model - - + + +def inference_dump_graph(ckpt_path, path_to_dump, hparams, scope=None): + model_creator = get_model_creator(hparams) + infer_model = model_helper.create_infer_model(model_creator, hparams, scope) @@ -64,7 +64,7 @@ index f5823d8..a733748 100644 @@ -310,6 +310,13 @@ def add_arguments(parser): parser.add_argument("--num_intra_threads", type=int, default=0, help="number of intra_op_parallelism_threads") - + + # Special argument for inference model dumping without inference + parser.add_argument("--dump_inference_model", type="bool", nargs="?", + const=True, default=False, @@ -72,7 +72,7 @@ index f5823d8..a733748 100644 + + parser.add_argument("--path_to_dump", type=str, default="", + help="Path to dump inference graph.") - + def create_hparams(flags): """Create training hparams.""" @@ -396,6 +403,9 @@ def create_hparams(flags): @@ -83,12 +83,12 @@ index f5823d8..a733748 100644 + dump_inference_model=flags.dump_inference_model, + path_to_dump=flags.path_to_dump, ) - - + + @@ -613,7 +623,7 @@ def create_or_load_hparams( return hparams - - + + -def run_main(flags, default_hparams, train_fn, inference_fn, target_session=""): +def run_main(flags, default_hparams, train_fn, inference_fn, inference_dump, target_session=""): """Run main.""" @@ -97,7 +97,7 @@ index f5823d8..a733748 100644 @@ -653,8 +663,26 @@ def run_main(flags, default_hparams, train_fn, inference_fn, target_session=""): out_dir, default_hparams, flags.hparams_path, save_hparams=(jobid == 0)) - + - ## Train / Decode - if flags.inference_input_file: + # Dumping inference model @@ -130,8 +130,8 @@ index f5823d8..a733748 100644 - run_main(FLAGS, default_hparams, train_fn, inference_fn) + inference_dump = inference.inference_dump_graph + run_main(FLAGS, default_hparams, train_fn, inference_fn, inference_dump) - - + + if __name__ == "__main__": ``` @@ -224,7 +224,7 @@ For more 
information about model cutting, refer to [Cutting Off Parts of a Model Inputs of the model: * `IteratorGetNext/placeholder_out_port_0` input with shape `[batch_size, max_sequence_length]` contains `batch_size` decoded input sentences. Every sentence is decoded the same way as indices of sentence elements in vocabulary and padded with index of `eos` (end of sentence symbol). If the length of the sentence is less than `max_sequence_length`, remaining elements are filled with index of `eos` token. - + * `IteratorGetNext/placeholder_out_port_1` input with shape `[batch_size]` contains sequence lengths for every sentence from the first input. \ For example, if `max_sequence_length = 50`, `batch_size = 1` and the sentence has only 30 elements, then the input tensor for `IteratorGetNext/placeholder_out_port_1` should be `[30]`. @@ -244,7 +244,7 @@ python3 benchmark_app.py -m -d CPU ``` -2. With Inference Engine Python API: +2. With OpenVINO Runtime Python API: > **NOTE**: Before running the example, insert a path to your GNMT `.xml` and `.bin` files into `MODEL_PATH` and `WEIGHTS_PATH`, and fill `input_data_tensor` and `seq_lengths` tensors according to your input data. @@ -274,4 +274,4 @@ exec_net = ie.load_network(network=net, device_name="CPU") result_ie = exec_net.infer(input_data) ``` -For more information about Python API, refer to [Inference Engine Python API](ie_python_api/api.html). +For more information about Python API, refer to [OpenVINO Runtime Python API](ie_python_api/api.html). 
diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_NCF_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_NCF_From_Tensorflow.md index 7fd53b5effa..97d3c3556c1 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_NCF_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_NCF_From_Tensorflow.md @@ -1,12 +1,12 @@ -# Convert Neural Collaborative Filtering Model from TensorFlow* to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_NCF_From_Tensorflow} +# Convert TensorFlow Neural Collaborative Filtering Model {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_NCF_From_Tensorflow} This tutorial explains how to convert Neural Collaborative Filtering (NCF) model to Intermediate Representation (IR). [Public TensorFlow NCF model](https://github.com/tensorflow/models/tree/master/official/recommendation) does not contain pre-trained weights. To convert this model to the IR: - 1. Use [the instructions](https://github.com/tensorflow/models/tree/master/official/recommendation#train-and-evaluate-model) from this repository to train the model. - 2. Freeze the inference graph you get on previous step in `model_dir` following -the instructions from the Freezing Custom Models in Python* section of -[Converting a TensorFlow* Model](../Convert_Model_From_TensorFlow.md). + 1. Use [the instructions](https://github.com/tensorflow/models/tree/master/official/recommendation#train-and-evaluate-model) from this repository to train the model. + 2. Freeze the inference graph you get on previous step in `model_dir` following +the instructions from the Freezing Custom Models in Python* section of +[Converting a TensorFlow* Model](../Convert_Model_From_TensorFlow.md). 
Run the following commands: ```python import tensorflow as tf @@ -22,12 +22,12 @@ graph_io.write_graph(frozen, './', 'inference_graph.pb', as_text=False) ``` where `rating/BiasAdd` is an output node. - 3. Convert the model to the IR.If you look at your frozen model, you can see that + 3. Convert the model to the IR. If you look at your frozen model, you can see that it has one input that is split into four `ResourceGather` layers. (Click image to zoom in.) ![NCF model beginning](../../../img/NCF_start.png) - But as the Model Optimizer does not support such data feeding, you should skip it. Cut + But as the Model Optimizer does not support such data feeding, you should skip it. Cut the edges incoming in `ResourceGather`s port 1: ```sh mo --input_model inference_graph.pb \ diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_Object_Detection_API_Models.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_Object_Detection_API_Models.md index 56324c1facb..0662fff1663 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_Object_Detection_API_Models.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_Object_Detection_API_Models.md @@ -1,9 +1,9 @@ -# Converting TensorFlow* Object Detection API Models {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Object_Detection_API_Models} +# Convert TensorFlow Object Detection API Models {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Object_Detection_API_Models} > **NOTES**: > * Starting with the 2022.1 release, the Model Optimizer can convert the TensorFlow\* Object Detection API Faster and Mask RCNNs topologies differently. By default, the Model Optimizer adds operation "Proposal" to the generated IR. 
This operation needs an additional input to the model with name "image_info" which should be fed with several values describing the pre-processing applied to the input image (refer to the [Proposal](../../../../ops/detection/Proposal_4.md) operation specification for more information). However, this input is redundant for the models trained and inferred with equal size images. Model Optimizer can generate IR for such models and insert operation [DetectionOutput](../../../../ops/detection/DetectionOutput_1.md) instead of `Proposal`. The `DetectionOutput` operation does not require additional model input "image_info" and moreover, for some models the produced inference results are closer to the original TensorFlow\* model. In order to trigger new behaviour the attribute "operation_to_add" in the corresponding JSON transformation configuration file should be set to value "DetectionOutput" instead of default one "Proposal". -> * Starting with the 2021.1 release, the Model Optimizer converts the TensorFlow\* Object Detection API SSDs, Faster and Mask RCNNs topologies keeping shape-calculating sub-graphs by default, so topologies can be re-shaped in the Inference Engine using dedicated reshape API. Refer to [Using Shape Inference](../../../../OV_Runtime_UG/ShapeInference.md) for more information on how to use this feature. It is possible to change the both spatial dimensions of the input image and batch size. -> * To generate IRs for TF 1 SSD topologies, the Model Optimizer creates a number of `PriorBoxClustered` operations instead of a constant node with prior boxes calculated for the particular input image size. This change allows you to reshape the topology in the Inference Engine using dedicated Inference Engine API. The reshaping is supported for all SSD topologies except FPNs which contain hardcoded shapes for some operations preventing from changing topology input shape. 
+> * Starting with the 2021.1 release, the Model Optimizer converts the TensorFlow\* Object Detection API SSDs, Faster and Mask RCNNs topologies keeping shape-calculating sub-graphs by default, so topologies can be re-shaped in the OpenVINO Runtime using dedicated reshape API. Refer to [Using Shape Inference](../../../../OV_Runtime_UG/ShapeInference.md) for more information on how to use this feature. It is possible to change the both spatial dimensions of the input image and batch size. +> * To generate IRs for TF 1 SSD topologies, the Model Optimizer creates a number of `PriorBoxClustered` operations instead of a constant node with prior boxes calculated for the particular input image size. This change allows you to reshape the topology in the OpenVINO Runtime using dedicated API. The reshaping is supported for all SSD topologies except FPNs which contain hardcoded shapes for some operations preventing from changing topology input shape. ## How to Convert a Model @@ -45,7 +45,7 @@ To convert a TensorFlow\* Object Detection API model, go to the `/t * `--tensorflow_object_detection_api_pipeline_config ` --- A special configuration file that describes the topology hyper-parameters and structure of the TensorFlow Object Detection API model. For the models downloaded from the TensorFlow\* Object Detection API zoo, the configuration file is named `pipeline.config`. If you plan to train a model yourself, you can find templates for these files in the [models repository](https://github.com/tensorflow/models/tree/master/research/object_detection/samples/configs). * `--input_shape` (optional) --- A custom input image shape. Refer to [Custom Input Shape](#tf_od_custom_input_shape) for more information how the `--input_shape` parameter is handled for the TensorFlow* Object Detection API models. -> **NOTE**: The color channel order (RGB or BGR) of an input data should match the channel order of the model training dataset. 
If they are different, perform the `RGB<->BGR` conversion specifying the command-line parameter: `--reverse_input_channels`. Otherwise, inference results may be incorrect. If you convert a TensorFlow\* Object Detection API model to use with the Inference Engine sample applications, you must specify the `--reverse_input_channels` parameter. For more information about the parameter, refer to **When to Reverse Input Channels** section of [Converting a Model to Intermediate Representation (IR)](../Converting_Model.md). +> **NOTE**: The color channel order (RGB or BGR) of an input data should match the channel order of the model training dataset. If they are different, perform the `RGB<->BGR` conversion specifying the command-line parameter: `--reverse_input_channels`. Otherwise, inference results may be incorrect. If you convert a TensorFlow\* Object Detection API model to use with the OpenVINO sample applications, you must specify the `--reverse_input_channels` parameter. For more information about the parameter, refer to **When to Reverse Input Channels** section of [Converting a Model to Intermediate Representation (IR)](../Converting_Model.md). Additionally to the mandatory parameters listed above you can use optional conversion parameters if needed. A full list of parameters is available in the [Converting a TensorFlow* Model](../Convert_Model_From_TensorFlow.md) topic. 
@@ -57,24 +57,24 @@ mo --input_model=/tmp/ssd_inception_v2_coco_2018_01_28/frozen_inference_graph.pb ## OpenVINO&; Toolkit Samples and Open Model Zoo Demos -Inference Engine comes with a number of samples to demonstrate use of OpenVINO API, additionally, +OpenVINO comes with a number of samples to demonstrate use of OpenVINO Runtime API, additionally, Open Model Zoo provides set of demo applications to show implementation of close to real life applications based on deep learning in various tasks, including Image Classifiacton, Visual Object Detection, Text Recognition, Speech Recognition, Natural Language Processing and others. Refer to the links below for more details. -* [Inference Engine Samples](../../../../OV_Runtime_UG/Samples_Overview.md) +* [OpenVINO Samples](../../../../OV_Runtime_UG/Samples_Overview.md) * [Open Model Zoo Demos](@ref omz_demos) ## Important Notes About Feeding Input Images to the Samples There are several important notes about feeding input images to the samples: -1. Inference Engine samples stretch input image to the size of the input operation without preserving aspect ratio. This behavior is usually correct for most topologies (including SSDs), but incorrect for other models like Faster R-CNN, Mask R-CNN and R-FCN. These models usually use keeps aspect ratio resizer. The type of pre-processing is defined in the pipeline configuration file in the section `image_resizer`. If keeping aspect ratio is used, then it is necessary to resize image before passing it to the sample and optionally pad the resized image with 0s (if the attribute "pad_to_max_dimension" in the pipeline.config is equal to "true"). +1. OpenVINO samples stretch input image to the size of the input operation without preserving aspect ratio. This behavior is usually correct for most topologies (including SSDs), but incorrect for other models like Faster R-CNN, Mask R-CNN and R-FCN. These models usually use keeps aspect ratio resizer. 
The type of pre-processing is defined in the pipeline configuration file in the section `image_resizer`. If keeping aspect ratio is used, then it is necessary to resize image before passing it to the sample and optionally pad the resized image with 0s (if the attribute "pad_to_max_dimension" in the pipeline.config is equal to "true"). -2. TensorFlow\* implementation of image resize may be different from the one implemented in the sample. Even reading input image from compressed format (like `.jpg`) could give different results in the sample and TensorFlow\*. So, if it is necessary to compare accuracy between the TensorFlow\* and the Inference Engine it is recommended to pass pre-resized input image in a non-compressed format (like `.bmp`). +2. TensorFlow\* implementation of image resize may be different from the one implemented in the sample. Even reading input image from compressed format (like `.jpg`) could give different results in the sample and TensorFlow\*. So, if it is necessary to compare accuracy between the TensorFlow\* and the OpenVINO it is recommended to pass pre-resized input image in a non-compressed format (like `.bmp`). -3. If you want to infer the model with the Inference Engine samples, convert the model specifying the `--reverse_input_channels` command line parameter. The samples load images in BGR channels order, while TensorFlow* models were trained with images in RGB order. When the `--reverse_input_channels` command line parameter is specified, the Model Optimizer performs first convolution or other channel dependent operation weights modification so the output will be like the image is passed with RGB channels order. +3. If you want to infer the model with the OpenVINO samples, convert the model specifying the `--reverse_input_channels` command line parameter. The samples load images in BGR channels order, while TensorFlow* models were trained with images in RGB order. 
When the `--reverse_input_channels` command line parameter is specified, the Model Optimizer performs first convolution or other channel dependent operation weights modification so the output will be like the image is passed with RGB channels order. 4. Read carefully messaged printed by the Model Optimizer during a model conversion. They contain important instructions on how to prepare input data before running the inference and how to interpret the output. @@ -128,7 +128,7 @@ Models with `keep_aspect_ratio_resizer` were trained to recognize object in real ## Detailed Explanations of Model Conversion Process -This section is intended for users who want to understand how the Model Optimizer performs Object Detection API models conversion in details. The knowledge given in this section is also useful for users having complex models that are not converted with the Model Optimizer out of the box. It is highly recommended to read [Sub-Graph Replacement in Model Optimizer](../../customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md) chapter first to understand sub-graph replacement concepts which are used here. +This section is intended for users who want to understand how the Model Optimizer performs Object Detection API models conversion in details. The knowledge given in this section is also useful for users having complex models that are not converted with the Model Optimizer out of the box. It is highly recommended to read the **Graph Transformation Extensions** section in the [Model Optimizer Extensibility](../../customize_model_optimizer/Customize_Model_Optimizer.md) documentation first to understand sub-graph replacement concepts which are used here. It is also important to open the model in the [TensorBoard](https://www.tensorflow.org/guide/summaries_and_tensorboard) to see the topology structure. Model Optimizer can create an event file that can be then fed to the TensorBoard* tool. 
Run the Model Optimizer with providing two command line parameters: * `--input_model ` --- Path to the frozen model diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_RetinaNet_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_RetinaNet_From_Tensorflow.md index 29eac09e444..510ff2f5862 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_RetinaNet_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_RetinaNet_From_Tensorflow.md @@ -1,8 +1,8 @@ -# Converting RetinaNet Model from TensorFlow* to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_RetinaNet_From_Tensorflow} +# Converting TensorFlow RetinaNet Model {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_RetinaNet_From_Tensorflow} This tutorial explains how to convert RetinaNet model to the Intermediate Representation (IR). -[Public RetinaNet model](https://github.com/fizyr/keras-retinanet) does not contain pretrained TensorFlow\* weights. +[Public RetinaNet model](https://github.com/fizyr/keras-retinanet) does not contain pretrained TensorFlow\* weights. To convert this model to the TensorFlow\* format, you can use [Reproduce Keras* to TensorFlow* Conversion tutorial](https://docs.openvino.ai/latest/omz_models_model_retinanet_tf.html). 
After you convert the model to TensorFlow* format, run the Model Optimizer command below: diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_Slim_Library_Models.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_Slim_Library_Models.md index 518fe816893..388962115b8 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_Slim_Library_Models.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_Slim_Library_Models.md @@ -1,13 +1,13 @@ -# Converting TensorFlow*-Slim Image Classification Model Library Models {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Slim_Library_Models} +# Convert TensorFlow Slim Image Classification Model Library Models {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Slim_Library_Models} -TensorFlow\*-Slim Image Classification Model Library is a library to define, train and evaluate classification models in TensorFlow\*. The library contains Python scripts defining the classification topologies together with checkpoint files for several pre-trained classification topologies. To convert a TensorFlow\*-Slim library model, complete the following steps: +TensorFlow\*-Slim Image Classification Model Library is a library to define, train and evaluate classification models in TensorFlow\*. The library contains Python scripts defining the classification topologies together with checkpoint files for several pre-trained classification topologies. To convert a TensorFlow\*-Slim library model, complete the following steps: 1. Download the TensorFlow\*-Slim models [git repository](https://github.com/tensorflow/models). 2. Download the pre-trained model [checkpoint](https://github.com/tensorflow/models/tree/master/research/slim#pre-trained-models). 3. Export the inference graph. 4. Convert the model using the Model Optimizer. 
-The [Example of an Inception V1 Model Conversion](#example_of_an_inception_v1_model_conversion) section below illustrates the process of converting an Inception V1 Model. +The [Example of an Inception V1 Model Conversion](#example_of_an_inception_v1_model_conversion) section below illustrates the process of converting an Inception V1 Model. ## Example of an Inception V1 Model Conversion This example demonstrates how to convert the model on Linux\* OSes, but it could be easily adopted for the Windows\* OSes. @@ -39,7 +39,7 @@ python3 tf_models/research/slim/export_inference_graph.py \ ``` Model Optimizer comes with the summarize graph utility, which identifies graph input and output nodes. Run the utility to determine input/output nodes of the Inception V1 model: - + ```sh python3 /openvino/tools/mo/utils/summarize_graph.py --input_model ./inception_v1_inference_graph.pb ``` @@ -64,9 +64,9 @@ The `-b` command line parameter is required because the Model Optimizer cannot c Refer to the [Mean and Scale Values for TensorFlow\*-Slim Models](#tf_slim_mean_scale_values) for the information why `--mean_values` and `--scale` command line parameters are used. ## Mean and Scale Values for TensorFlow\*-Slim Models -The TensorFlow\*-Slim Models were trained with normalized input data. There are several different normalization algorithms used in the Slim library. Inference Engine classification sample does not perform image pre-processing except resizing to the input layer size. It is necessary to pass mean and scale values to the Model Optimizer so they are embedded into the generated IR in order to get correct classification results. +The TensorFlow\*-Slim Models were trained with normalized input data. There are several different normalization algorithms used in the Slim library. OpenVINO classification sample does not perform image pre-processing except resizing to the input layer size. 
It is necessary to pass mean and scale values to the Model Optimizer so they are embedded into the generated IR in order to get correct classification results. -The file [preprocessing_factory.py](https://github.com/tensorflow/models/blob/master/research/slim/preprocessing/preprocessing_factory.py) contains a dictionary variable `preprocessing_fn_map` defining mapping between the model type and pre-processing function to be used. The function code should be analyzed to figure out the mean/scale values. +The file [preprocessing_factory.py](https://github.com/tensorflow/models/blob/master/research/slim/preprocessing/preprocessing_factory.py) contains a dictionary variable `preprocessing_fn_map` defining mapping between the model type and pre-processing function to be used. The function code should be analyzed to figure out the mean/scale values. The [inception_preprocessing.py](https://github.com/tensorflow/models/blob/master/research/slim/preprocessing/inception_preprocessing.py) file defines the pre-processing function for the Inception models. The `preprocess_for_eval` function contains the following code: @@ -83,7 +83,7 @@ The [inception_preprocessing.py](https://github.com/tensorflow/models/blob/maste Firstly, the `image` is converted to data type `tf.float32` and the values in the tensor are scaled to the `[0, 1]` range using the [tf.image.convert_image_dtype](https://www.tensorflow.org/api_docs/python/tf/image/convert_image_dtype) function. Then the `0.5` is subtracted from the image values and values multiplied by `2.0`. The final image range of values is `[-1, 1]`. -Inference Engine classification sample reads an input image as a three-dimensional array of integer values from the range `[0, 255]`. In order to scale them to `[-1, 1]` range, the mean value `127.5` for each image channel should be specified as well as scale factor `127.5`. 
+OpenVINO classification sample reads an input image as a three-dimensional array of integer values from the range `[0, 255]`. In order to scale them to `[-1, 1]` range, the mean value `127.5` for each image channel should be specified as well as scale factor `127.5`. Similarly, the mean/scale values can be determined for other Slim models. diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_WideAndDeep_Family_Models.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_WideAndDeep_Family_Models.md index 9ca2cfd64df..eac98293e71 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_WideAndDeep_Family_Models.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_WideAndDeep_Family_Models.md @@ -1,4 +1,4 @@ -# Converting TensorFlow* Wide and Deep Family Models to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_WideAndDeep_Family_Models} +# Convert TensorFlow Wide and Deep Family Models {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_WideAndDeep_Family_Models} The Wide and Deep models is a combination of wide and deep parts for memorization and generalization of object features respectively. These models can contain different types of object features such as numerical, categorical, sparse and sequential features. These feature types are specified @@ -24,7 +24,7 @@ The Wide and Deep model is no longer in the master branch of the repository but **Step 2**. 
Train the model -As the OpenVINO™ toolkit does not support the categorical with hash and crossed features, such feature types must be switched off in the model +As the OpenVINO™ toolkit does not support the categorical with hash and crossed features, such feature types must be switched off in the model by changing the `build_model_columns()` function in `census_dataset.py` as follows: ```python @@ -61,7 +61,7 @@ def build_model_columns(): age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65]) # Wide columns and deep columns. base_columns = [ - education, marital_status, relationship, workclass, + education, marital_status, relationship, workclass, age_buckets, ] crossed_columns = [] @@ -92,7 +92,7 @@ python census_main.py Use the following command line to convert the saved model file with the checkpoint: ```sh - mo + mo --input_checkpoint checkpoint --input_meta_graph model.ckpt.meta --input "IteratorGetNext:0[2], IteratorGetNext:1[2], @@ -122,7 +122,7 @@ Use the following command line to convert the saved model file with the checkpoi dnn/input_from_feature_columns/input_layer/relationship_indicator/to_sparse_input/dense_shape:0[2]{i64}->[2 50], dnn/input_from_feature_columns/input_layer/workclass_indicator/to_sparse_input/indices:0[10 2]{i64}, dnn/input_from_feature_columns/input_layer/workclass_indicator/hash_table_Lookup/LookupTableFindV2:0[10]{i64}, - dnn/input_from_feature_columns/input_layer/workclass_indicator/to_sparse_input/dense_shape:0[2]{i64}->[2 50]" + dnn/input_from_feature_columns/input_layer/workclass_indicator/to_sparse_input/dense_shape:0[2]{i64}->[2 50]" --output head/predictions/probabilities ``` diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_XLNet_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_XLNet_From_Tensorflow.md index f87e8385dae..ac68d55644f 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_XLNet_From_Tensorflow.md +++ 
b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_XLNet_From_Tensorflow.md @@ -1,4 +1,4 @@ -# Convert TensorFlow* XLNet Model to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_XLNet_From_Tensorflow} +# Convert TensorFlow XLNet Model {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_XLNet_From_Tensorflow} Pre-trained models for XLNet (Bidirectional Encoder Representations from Transformers) are [publicly available](https://github.com/zihangdai/xlnet). @@ -16,8 +16,8 @@ Download and unzip an archive with the [XLNet-Base, Cased](https://storage.googl After the archive is unzipped, the directory `cased_L-12_H-768_A-12` is created and contains the following files: * TensorFlow checkpoint (`xlnet_model.ckpt`) containing the pre-trained weights (which is actually 3 files) -* sentence piece model (`spiece.model`) used for (de)tokenization -* config file (`xlnet_config.json`) which specifies the hyperparameters of the model +* sentence piece model (`spiece.model`) used for (de)tokenization +* config file (`xlnet_config.json`) which specifies the hyperparameters of the model To get pb-file from the archive contents, you need to do the following. @@ -33,7 +33,7 @@ To get pb-file from the archive contents, you need to do the following. mkdir try_save ``` - + 2. 
Save and run the following Python script in `~/XLNet-Base/xlnet`: @@ -102,8 +102,8 @@ Download and unzip an archive with the [XLNet-Large, Cased](https://storage.goog After the archive is unzipped, the directory `cased_L-12_H-1024_A-16` is created and contains the following files: * TensorFlow checkpoint (`xlnet_model.ckpt`) containing the pre-trained weights (which is actually 3 files) -* sentence piece model (`spiece.model`) used for (de)tokenization -* config file (`xlnet_config.json`) which specifies the hyperparameters of the model +* sentence piece model (`spiece.model`) used for (de)tokenization +* config file (`xlnet_config.json`) which specifies the hyperparameters of the model To get pb-file from the archive contents, you need to do the following. diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_YOLO_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_YOLO_From_Tensorflow.md index 7abe6bc74f9..395745c26a9 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_YOLO_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_YOLO_From_Tensorflow.md @@ -1,7 +1,7 @@ -# Converting YOLO* Models to the Intermediate Representation (IR) {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_YOLO_From_Tensorflow} +# Convert TensorFlow YOLO Models {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_YOLO_From_Tensorflow} This document explains how to convert real-time object detection YOLOv1\*, YOLOv2\*, YOLOv3\* and YOLOv4\* public models to the Intermediate Representation (IR). 
All YOLO\* models are originally implemented in the DarkNet\* framework and consist of two files: -* `.cfg` file with model configurations +* `.cfg` file with model configurations * `.weights` file with model weights Depending on a YOLO model version, the Model Optimizer converts it differently: @@ -35,9 +35,9 @@ python keras-YOLOv3-model-set/tools/model_converter/convert.py **NOTE:** Before you run the conversion, make sure you have installed all the Model Optimizer dependencies for TensorFlow 2. +> **NOTE**: Before you run the conversion, make sure you have installed all the Model Optimizer dependencies for TensorFlow 2. ```sh -mo --saved_model_dir yolov4 --output_dir models/IRs --input_shape [1,608,608,3] --model_name yolov4 +mo --saved_model_dir yolov4 --output_dir models/IRs --input_shape [1,608,608,3] --model_name yolov4 ``` ## Convert YOLOv3 Model to IR @@ -115,7 +115,7 @@ It consists of several attributes:
where: - `id` and `match_kind` are parameters that you cannot change. - `custom_attributes` is a parameter that stores all the YOLOv3 specific attributes: - - `classes`, `coords`, `num`, and `masks` are attributes that you should copy from the configuration + - `classes`, `coords`, `num`, and `masks` are attributes that you should copy from the configuration file that was used for model training. If you used DarkNet officially shared weights, you can use `yolov3.cfg` or `yolov3-tiny.cfg` configuration file from https://github.com/david8862/keras-YOLOv3-model-set/tree/master/cfg. Replace the default values in `custom_attributes` with the parameters that follow the `[yolo]` titles in the configuration file. @@ -184,8 +184,8 @@ To convert YOLOv1 or YOLOv2 model to TensorFlow, go to the root directory of the python3 flow --model yolov1.cfg --load yolov1.weights --savepb ``` -- For YOLOv2 with VOC dataset `--labels` argument should be specified and additional changes in the original exporting script are required. -In the file [https://github.com/thtrieu/darkflow/blob/b187c65/darkflow/utils/loader.py#L121](https://github.com/thtrieu/darkflow/blob/b187c65630f9aa1bb8b809c33ec67c8cc5d60124/darkflow/utils/loader.py#L121) +- For YOLOv2 with VOC dataset `--labels` argument should be specified and additional changes in the original exporting script are required. +In the file [https://github.com/thtrieu/darkflow/blob/b187c65/darkflow/utils/loader.py#L121](https://github.com/thtrieu/darkflow/blob/b187c65630f9aa1bb8b809c33ec67c8cc5d60124/darkflow/utils/loader.py#L121) change line 121 from `self.offset = 16` to `self.offset = 20`. Then run: ```sh python3 flow --model yolov2-voc.cfg --load yolov2-voc.weights --labels voc-labels.txt --savepb @@ -204,7 +204,7 @@ File `.pb` is a TensorFlow representation of the YOLO model. #### Convert TensorFlow YOLOv1 or YOLOv2 Model to the IR Converted TensorFlow YOLO model is missing `Region` layer and its parameters. 
Original YOLO `Region` layer parameters are stored in the configuration `/.cfg` -file under the `[region]` title. +file under the `[region]` title. To recreate the original model structure, use the corresponding yolo `.json` configuration file with custom operations and `Region` layer parameters when converting the model to the IR. This file is located in the `/tools/model_optimizer/extensions/front/tf` directory. @@ -223,7 +223,7 @@ To generate the IR of the YOLOv1 model, provide TensorFlow YOLOv1 or YOLOv2 mode where: * `--batch` defines shape of model input. In the example, `--batch` is equal to 1, but you can also specify other integers larger than 1. -* `--scale` specifies scale factor that input values will be divided by. +* `--scale` specifies scale factor that input values will be divided by. The model was trained with input values in the range `[0,1]`. OpenVINO™ toolkit samples read input images as values in `[0,255]` range, so the scale 255 must be applied. * `--transformations_config` adds missing `Region` layers to the model. In the IR, the `Region` layer has name `RegionYolo`. For other applicable parameters, refer to [Convert Model from TensorFlow](../Convert_Model_From_TensorFlow.md). 
diff --git a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_lm_1b_From_Tensorflow.md b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_lm_1b_From_Tensorflow.md index 9d26618945f..5bf4b50a8f0 100644 --- a/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_lm_1b_From_Tensorflow.md +++ b/docs/MO_DG/prepare_model/convert_model/tf_specific/Convert_lm_1b_From_Tensorflow.md @@ -1,4 +1,4 @@ -# Converting TensorFlow* Language Model on One Billion Word Benchmark to the Intermediate Representation {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_lm_1b_From_Tensorflow} +# Convert TensorFlow Language Model on One Billion Word Benchmark {#openvino_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_lm_1b_From_Tensorflow} ## Download the Pre-trained Language Model on One Billion Word Benchmark @@ -51,14 +51,14 @@ lm_1b/ ckpt-char-embedding ckpt-lstm ckpt-softmax0 - ckpt-softmax1 - ckpt-softmax2 - ckpt-softmax3 - ckpt-softmax4 - ckpt-softmax5 - ckpt-softmax6 - ckpt-softmax7 - ckpt-softmax8 + ckpt-softmax1 + ckpt-softmax2 + ckpt-softmax3 + ckpt-softmax4 + ckpt-softmax5 + ckpt-softmax6 + ckpt-softmax7 + ckpt-softmax8 ``` diff --git a/docs/MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md b/docs/MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md index d3252704549..ca0f8b1815d 100644 --- a/docs/MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md +++ b/docs/MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md @@ -10,7 +10,7 @@ @endsphinxdirective -Model Optimizer extensibility mechanism enables support of new operations and custom transformations to generate the optimized intermediate representation (IR) as described in the +Model Optimizer extensibility mechanism enables support of new operations and custom transformations to generate the optimized intermediate representation (IR) as described in the [Deep Learning Network 
Intermediate Representation and Operation Sets in OpenVINO™](../../IR_and_opsets.md). This mechanism is a core part of the Model Optimizer. The Model Optimizer itself uses it under the hood, being a huge set of examples on how to add custom logic to support your model. @@ -144,7 +144,7 @@ OpenVINO™ [TopK](../../../ops/sort/TopK_3.md) operation semantic, which re It is important to mention that sometimes it seems like transformation cannot be implemented during the front phase because the actual values of inputs or shapes are needed. But in fact shapes or values manipulations can be implemented -using operations that are added to the graph. Consider the +using operations that are added to the graph. Consider the `extensions/front/onnx/flattenONNX_to_reshape.py` transformation, which replaces an ONNX\* operation [Flatten](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Flatten) with a sub-graph of operations performing the following (for the case when `axis` is not equal to 0 and 1): @@ -177,9 +177,9 @@ defined as a mathematical expression using the [ShapeOf](../../../ops/shape/Shap Model Optimizer calculates output shapes for all operations in a model to write them to Intermediate Representation files. -> **NOTE**: This is a legacy requirement because starting from IR version 10 Inference Engine needs to know shapes of +> **NOTE**: This is a legacy requirement because starting from IR version 10 OpenVINO Runtime needs to know shapes of > the [Const](../../../ops/infrastructure/Constant_1.md) and the [Parameter](../../../ops/infrastructure/Parameter_1.md) -> operations only. The nGraph component of the Inference Engine calculates output shapes for all operations in a model +> operations only. The OpenVINO Runtime calculates output shapes for all operations in a model > using shapes of [Parameter](../../../ops/infrastructure/Parameter_1.md) and > [Const](../../../ops/infrastructure/Constant_1.md) operations defined with respective operation attributes. 
@@ -257,11 +257,13 @@ More information on how to develop middle transformations and dedicated API desc [Middle Phase Transformations](#middle-phase-transformations). ### NHWC to NCHW Layout Change -There are several middle transformations responsible for changing model layout from NHWC to NCHW. These transformations -are triggered by default for TensorFlow\* models only because it is the only framework with Convolution operations in -NHWC layout. This layout change is disabled if the model does not have operations that OpenVINO&trade needs to execute in -NCHW layout, for example, Convolutions in NHWC layout. It is still possible to force Model Optimizer to do layout change -using `--disable_nhwc_to_nchw` command-line parameter. + +There are several middle transformations responsible for changing model layout from NHWC to NCHW. These transformations are triggered by default for TensorFlow models as TensorFlow supports Convolution operations in the NHWC layout. + +This layout change is disabled automatically if the model does not have operations that OpenVINO&trade; needs to execute in the NCHW layout, for example, Convolutions in NHWC layout. + +It is still possible to disable this layout change, using `--disable_nhwc_to_nchw` command-line parameter, although it is not advised. + The layout change is a complex problem and detailed explanation of it is out of this document scope. A very brief explanation of this process is provided below: @@ -285,7 +287,7 @@ The back phase starts after the layout change to NCHW. This phase contains mostl 1. Transformations that should work with a graph in the NCHW layout and thus cannot be implemented in the middle phase. -2. Transformations that replace nodes corresponding to internal Model Optimizer operations with nodes corresponding to the +2. Transformations that replace nodes corresponding to internal Model Optimizer operations with nodes corresponding to the [opset](@ref openvino_docs_ops_opset) operations. 3. 
Transformations that normalize operations inputs according to the specification. 4. Final optimization transformations. @@ -301,7 +303,7 @@ The last phase of a model conversion is the Intermediate Representation emitting steps: 1. Iterates over all operation nodes in the graph and checks that all nodes have the `type` attribute set. This attribute -defines the operation type and is used in the Inference Engine to instantiate proper operation from the +defines the operation type and is used in the OpenVINO Runtime to instantiate proper operation from the [opset](@ref openvino_docs_ops_opset) specified in the `version` attribute of the node. If some node does not have attribute `type` or its values is equal to `None`, the Model Optimizer exits with an error. 2. Performs type inference of graph operations similar to the shape inference. Inferred data types are saved to a port @@ -701,7 +703,7 @@ to enable or disable execution of the transformation during a model conversion. 2. Attribute `id` specifies a unique transformation string identifier. This transformation identifier can be used to enable (disable) the transformation by setting environment variable `MO_ENABLED_TRANSFORMS` (`MO_DISABLED_TRANSFORMS`) with a comma separated list of `id`s. The environment variables override the value of the `enabled` attribute of the -transformation. Instead of using `id` attribute value you can add fully defined class name to `MO_ENABLED_TRANSFORMS` +transformation. Instead of using `id` attribute value you can add fully defined class name to `MO_ENABLED_TRANSFORMS` (`MO_DISABLED_TRANSFORMS`) variable, `extensions.back.NonmalizeToNormalizeL2.NormalizeToNormalizeL2` for example. Optional attribute. 3. Attribute `run_not_recursively` specifies whether the transformation should be executed in the sub-graphs, for example, body of the [TensorIterator](../../../ops/infrastructure/TensorIterator_1.md) and @@ -741,8 +743,7 @@ sub-graph of the original graph isomorphic to the specified pattern. 2. 
[Specific Operation Front Phase Transformations](#specific-operation-front-phase-transformations) triggered for the node with a specific `op` attribute value. 3. [Generic Front Phase Transformations](#generic-front-phase-transformations). -4. Manually enabled transformation defined with a JSON configuration file (for TensorFlow\*, ONNX\* and MXNet\* models -only) specified using the `--transformations_config` command line parameter: +4. Manually enabled transformation defined with a JSON configuration file (for TensorFlow, ONNX, MXNet, and PaddlePaddle models) specified using the `--transformations_config` command line parameter: 1. [Node Name Pattern Front Phase Transformations](#node-name-pattern-front-phase-transformation). 2. [Front Phase Transformations Using Start and End Points](#start-end-points-front-phase-transformations). 3. [Generic Front Phase Transformations Enabled with Transformations Configuration File](#generic-transformations-config-front-phase-transformations). @@ -1260,5 +1261,5 @@ Refer to the `extensions/back/GatherNormalizer.py` for the example of a such typ * [Deep Learning Network Intermediate Representation and Operation Sets in OpenVINO™](../../IR_and_opsets.md) * [Converting a Model to Intermediate Representation (IR)](../convert_model/Converting_Model.md) * [OpenVINO Model Representation](../../../OV_Runtime_UG/model_representation.md) -* [Inference Engine Extensibility Mechanism](../../../OV_Runtime_UG/Extensibility_DG/Intro.md) +* [OpenVINO™ Extensibility Mechanism](../../../Extensibility_UG/Intro.md) * [Extending the Model Optimizer with Caffe* Python Layers](Extending_Model_Optimizer_with_Caffe_Python_Layers.md) diff --git a/docs/MO_DG/prepare_model/customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md b/docs/MO_DG/prepare_model/customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md deleted file mode 100644 index 4883d2f3e09..00000000000 --- 
a/docs/MO_DG/prepare_model/customize_model_optimizer/Subgraph_Replacement_Model_Optimizer.md +++ /dev/null @@ -1,4 +0,0 @@ -# [DEPRECATED] Sub-Graph Replacement in the Model Optimizer {#openvino_docs_MO_DG_prepare_model_customize_model_optimizer_Subgraph_Replacement_Model_Optimizer} - -The document has been deprecated. Refer to the [Model Optimizer Extensibility](Customize_Model_Optimizer.md) -for the up-to-date documentation. diff --git a/docs/OV_Runtime_UG/API_Changes.md b/docs/OV_Runtime_UG/API_Changes.md deleted file mode 100644 index 0490f9228f2..00000000000 --- a/docs/OV_Runtime_UG/API_Changes.md +++ /dev/null @@ -1,10 +0,0 @@ -# OpenVINO™ Runtime API Changes History {#openvino_docs_OV_Runtime_API_Changes} - -The sections below contain detailed list of changes made to the OpenVINO™ Runtime API in recent releases. - -## 2022.1 - -### New API - -* The OpenVINO™ 2.0 API was introduced. - diff --git a/docs/OV_Runtime_UG/Bfloat16Inference.md b/docs/OV_Runtime_UG/Bfloat16Inference.md deleted file mode 100644 index 8b0e8ff6779..00000000000 --- a/docs/OV_Runtime_UG/Bfloat16Inference.md +++ /dev/null @@ -1,214 +0,0 @@ -# Bfloat16 Inference {#openvino_docs_IE_DG_Bfloat16Inference} - -## Bfloat16 Inference Usage (C++) - -@sphinxdirective -.. raw:: html - -
C++
-@endsphinxdirective - -### Disclaimer - -Inference Engine with the bfloat16 inference implemented on CPU must support the native *avx512_bf16* instruction and therefore the bfloat16 data format. It is possible to use bfloat16 inference in simulation mode on platforms with Intel® Advanced Vector Extensions 512 (Intel® AVX-512), but it leads to significant performance degradation in comparison with FP32 or native *avx512_bf16* instruction usage. - -### Introduction -Bfloat16 computations (referred to as BF16) is the Brain Floating-Point format with 16 bits. This is a truncated 16-bit version of the 32-bit IEEE 754 single-precision floating-point format FP32. BF16 preserves 8 exponent bits as FP32 but reduces precision of the sign and mantissa from 24 bits to 8 bits. - -![bf16_format] - -Preserving the exponent bits keeps BF16 to the same range as the FP32 (~1e-38 to ~3e38). This simplifies conversion between two data types: you just need to skip or flush to zero 16 low bits. Truncated mantissa leads to occasionally less precision, but according to [investigations](https://cloud.google.com/blog/products/ai-machine-learning/bfloat16-the-secret-to-high-performance-on-cloud-tpus), neural networks are more sensitive to the size of the exponent than the mantissa size. Also, in lots of models, precision is needed close to zero but not so much at the maximum range. Another useful feature of BF16 is possibility to encode INT8 in BF16 without loss of accuracy, because INT8 range completely fits in BF16 mantissa field. It reduces data flow in conversion from INT8 input image data to BF16 directly without intermediate representation in FP32, or in combination of [INT8 inference](Int8Inference.md) and BF16 layers. - -See the [BFLOAT16 – Hardware Numerics Definition white paper](https://software.intel.com/content/dam/develop/external/us/en/documents/bf16-hardware-numerics-definition-white-paper.pdf) for more bfloat16 format details. 
- -There are two ways to check if CPU device can support bfloat16 computations for models: - -1. Query the instruction set using one of these system commands: - * `lscpu | grep avx512_bf16` - * `cat /proc/cpuinfo | grep avx512_bf16` -2. Use the [Query API](InferenceEngine_QueryAPI.md) with `METRIC_KEY(OPTIMIZATION_CAPABILITIES)`, which should return `BF16` in the list of CPU optimization options: - -@snippet snippets/Bfloat16Inference0.cpp part0 - -The current Inference Engine solution for bfloat16 inference uses the Intel® Math Kernel Library for Deep Neural Networks (Intel® MKL-DNN) and supports inference of the significant number of layers in BF16 computation mode. - -### Lowering Inference Precision - -Lowering precision to increase performance is [widely used](https://software.intel.com/content/www/us/en/develop/articles/lower-numerical-precision-deep-learning-inference-and-training.html) for optimization of inference. The bfloat16 data type usage on CPU for the first time opens the possibility of default optimization approach. The embodiment of this approach is to use the optimization capabilities of the current platform to achieve maximum performance while maintaining the accuracy of calculations within the acceptable range. - -Using Bfloat16 precision provides the following performance benefits: - -1. Faster multiplication of two BF16 numbers because of shorter mantissa of bfloat16 data. -2. No need to support denormals and handling exceptions as this is a performance optimization. -3. Fast conversion of float32 to bfloat16 and vice versa. -4. Reduced size of data in memory, as a result, larger models fit in the same memory bounds. -5. Reduced amount of data that must be transferred, as a result, reduced data transition time. - -For default optimization on CPU, the source model is converted from FP32 or FP16 to BF16 and executed internally on platforms with native BF16 support. 
In this case, `KEY_ENFORCE_BF16` is set to `YES` in the `PluginConfigParams` for `GetConfig()`. The code below demonstrates how to check if the key is set: - -@snippet snippets/Bfloat16Inference1.cpp part1 - -To disable BF16 internal transformations in C++ API, set the `KEY_ENFORCE_BF16` to `NO`. In this case, the model infers as is without modifications with precisions that were set on each layer edge. - -@snippet snippets/Bfloat16Inference2.cpp part2 - -To disable BF16 in C API: - -``` -ie_config_t config = { "ENFORCE_BF16", "NO", NULL}; -ie_core_load_network(core, network, device_name, &config, &exe_network); -``` - -An exception with the message `Platform doesn't support BF16 format` is formed in case of setting `KEY_ENFORCE_BF16` to `YES` on CPU without native BF16 support or BF16 simulation mode. - -Low-Precision 8-bit integer models cannot be converted to BF16, even if bfloat16 optimization is set by default. - -### Bfloat16 Simulation Mode - -Bfloat16 simulation mode is available on CPU and Intel® AVX-512 platforms that do not support the native `avx512_bf16` instruction. The simulator does not guarantee good performance. Note that the CPU must still support the AVX-512 extensions. - -To enable the simulation of Bfloat16: -* In the [Benchmark App](../../samples/cpp/benchmark_app/README.md), add the `-enforcebf16=true` option -* In C++ API, set `KEY_ENFORCE_BF16` to `YES` -* In C API: -``` -ie_config_t config = { "ENFORCE_BF16", "YES", NULL}; -ie_core_load_network(core, network, device_name, &config, &exe_network); -``` - -### Performance Counters - -Information about layer precision is stored in the performance counters that are available from the Inference Engine API. 
The layers have the following marks: - -* Suffix `BF16` for layers that had bfloat16 data type input and were computed in BF16 precision -* Suffix `FP32` for layers computed in 32-bit precision - -For example, the performance counters table for the Inception model can look as follows: - -``` -pool5 EXECUTED layerType: Pooling realTime: 143 cpu: 143 execType: jit_avx512_BF16 -fc6 EXECUTED layerType: FullyConnected realTime: 47723 cpu: 47723 execType: jit_gemm_BF16 -relu6 NOT_RUN layerType: ReLU realTime: 0 cpu: 0 execType: undef -fc7 EXECUTED layerType: FullyConnected realTime: 7558 cpu: 7558 execType: jit_gemm_BF16 -relu7 NOT_RUN layerType: ReLU realTime: 0 cpu: 0 execType: undef -fc8 EXECUTED layerType: FullyConnected realTime: 2193 cpu: 2193 execType: jit_gemm_BF16 -prob EXECUTED layerType: SoftMax realTime: 68 cpu: 68 execType: jit_avx512_FP32 -``` - -The **execType** column of the table includes inference primitives with specific suffixes. - -## Bfloat16 Inference Usage (Python) - -@sphinxdirective -.. raw:: html - -
Python
-@endsphinxdirective - -### Disclaimer - -Inference Engine with the bfloat16 inference implemented on CPU must support the native *avx512_bf16* instruction and therefore the bfloat16 data format. It is possible to use bfloat16 inference in simulation mode on platforms with Intel® Advanced Vector Extensions 512 (Intel® AVX-512), but it leads to significant performance degradation in comparison with FP32 or native *avx512_bf16* instruction usage. - -### Introduction -Bfloat16 computations (referred to as BF16) is the Brain Floating-Point format with 16 bits. This is a truncated 16-bit version of the 32-bit IEEE 754 single-precision floating-point format FP32. BF16 preserves 8 exponent bits as FP32 but reduces precision of the sign and mantissa from 24 bits to 8 bits. - -![bf16_format] - -Preserving the exponent bits keeps BF16 to the same range as the FP32 (~1e-38 to ~3e38). This simplifies conversion between two data types: you just need to skip or flush to zero 16 low bits. Truncated mantissa leads to occasionally less precision, but according to investigations, neural networks are more sensitive to the size of the exponent than the mantissa size. Also, in lots of models, precision is needed close to zero but not so much at the maximum range. Another useful feature of BF16 is possibility to encode INT8 in BF16 without loss of accuracy, because INT8 range completely fits in BF16 mantissa field. It reduces data flow in conversion from INT8 input image data to BF16 directly without intermediate representation in FP32, or in combination of [INT8 inference](Int8Inference.md) and BF16 layers. - -See the [BFLOAT16 – Hardware Numerics Definition white paper](https://software.intel.com/content/dam/develop/external/us/en/documents/bf16-hardware-numerics-definition-white-paper.pdf) for more bfloat16 format details. - -There are two ways to check if CPU device can support bfloat16 computations for models: - -1. 
Query the instruction set using one of these system commands: - * `lscpu | grep avx512_bf16` - * `cat /proc/cpuinfo | grep avx512_bf16` -2. Use the Query API with METRIC_KEY(OPTIMIZATION_CAPABILITIES), which should return BF16 in the list of CPU optimization options: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -net = ie.read_network(path_to_xml_file) -cpu_caps = ie.get_metric(metric_name="OPTIMIZATION_CAPABILITIES", device_name="CPU") -``` - -The current Inference Engine solution for bfloat16 inference uses the Intel® Math Kernel Library for Deep Neural Networks (Intel® MKL-DNN) and supports inference of the significant number of layers in BF16 computation mode. - -### Lowering Inference Precision - -Lowering precision to increase performance is widely used for optimization of inference. The bfloat16 data type usage on CPU for the first time opens the possibility of default optimization approach. The embodiment of this approach is to use the optimization capabilities of the current platform to achieve maximum performance while maintaining the accuracy of calculations within the acceptable range. - -Using Bfloat16 precision provides the following performance benefits: - -1. Faster multiplication of two BF16 numbers because of shorter mantissa of bfloat16 data. -2. No need to support denormals and handling exceptions as this is a performance optimization. -3. Fast conversion of float32 to bfloat16 and vice versa. -4. Reduced size of data in memory, as a result, larger models fit in the same memory bounds. -5. Reduced amount of data that must be transferred, as a result, reduced data transition time. - -For default optimization on CPU, the source model is converted from FP32 or FP16 to BF16 and executed internally on platforms with native BF16 support. In this case, ENFORCE_BF16 is set to YES. 
The code below demonstrates how to check if the key is set: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -net = ie.read_network(path_to_xml_file) -exec_net = ie.load_network(network=net, device_name="CPU") -exec_net.get_config("ENFORCE_BF16") -``` - -To enable BF16 internal transformations, set the key "ENFORCE_BF16" to "YES" in the ExecutableNetwork configuration. - -```python -bf16_config = {"ENFORCE_BF16" : "YES"} -exec_net = ie.load_network(network=net, device_name="CPU", config = bf16_config) -``` - -To disable BF16 internal transformations, set the key "ENFORCE_BF16" to "NO". In this case, the model infers as is without modifications with precisions that were set on each layer edge. - -An exception with the message `Platform doesn't support BF16 format` is formed in case of setting "ENFORCE_BF16" to "YES"on CPU without native BF16 support or BF16 simulation mode. - -Low-Precision 8-bit integer models cannot be converted to BF16, even if bfloat16 optimization is set by default. - -### Bfloat16 Simulation Mode - -Bfloat16 simulation mode is available on CPU and Intel® AVX-512 platforms that do not support the native avx512_bf16 instruction. The simulator does not guarantee good performance. Note that the CPU must still support the AVX-512 extensions. - -#### To Enable the simulation of Bfloat16: - -* In the Benchmark App, add the -enforcebf16=true option -* In Python, use the following code as an example: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -net = ie.read_network(path_to_xml_file) -bf16_config = {"ENFORCE_BF16" : "YES"} -exec_net = ie.load_network(network=net, device_name="CPU", config=bf16_config) -``` - -### Performance Counters - -Information about layer precision is stored in the performance counters that are available from the Inference Engine API. 
The layers have the following marks: - -* Suffix *BF16* for layers that had bfloat16 data type input and were computed in BF16 precision -* Suffix *FP32* for layers computed in 32-bit precision - -For example, the performance counters table for the Inception model can look as follows: - -``` -pool5 EXECUTED layerType: Pooling realTime: 143 cpu: 143 execType: jit_avx512_BF16 -fc6 EXECUTED layerType: FullyConnected realTime: 47723 cpu: 47723 execType: jit_gemm_BF16 -relu6 NOT_RUN layerType: ReLU realTime: 0 cpu: 0 execType: undef -fc7 EXECUTED layerType: FullyConnected realTime: 7558 cpu: 7558 execType: jit_gemm_BF16 -relu7 NOT_RUN layerType: ReLU realTime: 0 cpu: 0 execType: undef -fc8 EXECUTED layerType: FullyConnected realTime: 2193 cpu: 2193 execType: jit_gemm_BF16 -prob EXECUTED layerType: SoftMax realTime: 68 cpu: 68 execType: jit_avx512_FP32 -``` - - -The **execType** column of the table includes inference primitives with specific suffixes. - -[bf16_format]: img/bf16_format.png diff --git a/docs/OV_Runtime_UG/DynamicBatching.md b/docs/OV_Runtime_UG/DynamicBatching.md deleted file mode 100644 index d962aa2a664..00000000000 --- a/docs/OV_Runtime_UG/DynamicBatching.md +++ /dev/null @@ -1,106 +0,0 @@ -# Using Dynamic Batching {#openvino_docs_IE_DG_DynamicBatching} - -## Using Dynamic Batching (C++) - -@sphinxdirective -.. raw:: html - -
C++
-@endsphinxdirective - -The Dynamic Batching feature allows you to dynamically change batch size for inference calls -within a preset batch size limit. This feature might be useful when batch size is unknown beforehand and using an extra-large batch size is undesirable or impossible due to resource limitations. For example, applying face detection and then mood labeling to a video, you won't know in advance how many frames will contain a face when you pass inferencing results to a secondary model. - - -You can activate Dynamic Batching by setting `KEY_DYN_BATCH_ENABLED` flag to `YES` in a configuration map that is -passed to the plugin while loading a network. -This configuration creates an `ExecutableNetwork` object that will allow setting batch size -dynamically in all of its infer requests using `SetBatch()` method. -The batch size that was set in the passed `CNNNetwork` object will be used as a maximum batch size limit. - -Here is a code example: - -@snippet snippets/DynamicBatching.cpp part0 - - -### Limitations - -Currently, there are certain limitations for the use of Dynamic Batching exist: - -* Use Dynamic Batching with CPU and GPU plugins only. -* Use Dynamic Batching on topologies that consist of certain layers only: - * Convolution - * Deconvolution - * Activation - * LRN - * Pooling - * FullyConnected - * SoftMax - * Split - * Concatenation - * Power - * Eltwise - * Crop - * BatchNormalization - * Copy - -The following types of layers are not supported: - -* Layers that might arbitrary change tensor shape (such as Flatten, Permute, Reshape) -* Layers specific to object detection topologies (ROIPooling, ProirBox, DetectionOutput) -* Custom layers - -Topology analysis is performed during the process of loading a network into plugin, and if the topology is not supported, an exception is generated. - -## Using Dynamic Batching (Python) - -@sphinxdirective -.. raw:: html - -
Python
-@endsphinxdirective - -Dynamic Batching is a feature that allows you to dynamically change batch size for inference calls within a preset batch size limit. This feature might be useful when batch size is unknown beforehand, and using extra large batch size is not desired or impossible due to resource limitations. For example, face detection with person age, gender, or mood recognition is a typical usage scenario. - -You can activate Dynamic Batching by setting the "DYN_BATCH_ENABLED" flag to "YES" in a configuration map that is passed to the plugin while loading a network. This configuration creates an `ExecutableNetwork` object that will allow setting batch size dynamically in all of its infer requests using the [ie_api.batch_size](api/ie_python_api/_autosummary/openvino.inference_engine.IENetwork.html#openvino.inference_engine.IENetwork.batch_size) method. The batch size that was set in the passed CNNNetwork object will be used as a maximum batch size limit. - -```python -from openvino.inference_engine import IECore - -ie = IECore() -dyn_config = {"DYN_BATCH_ENABLED": "YES"} -ie.set_config(config=dyn_config, device_name=device) -# Read a network in IR or ONNX format -net = ie.read_network(path_to_model) -net.batch_size = 32 # set the maximum batch size to 32 -exec_net = ie.load_network(network=net, device_name=device) -``` - -### Limitations - -Currently, certain limitations for the use of Dynamic Batching exist: - -* Use Dynamic Batching with CPU and GPU plugins only. 
-* Use Dynamic Batching on topologies that consist of certain layers only: - * Convolution - * Deconvolution - * Activation - * LRN - * Pooling - * FullyConnected - * SoftMax - * Split - * Concatenation - * Power - * Eltwise - * Crop - * BatchNormalization - * Copy - -The following types of layers are not supported: - -* Layers that might arbitrary change tensor shape (such as Flatten, Permute, Reshape) -* Layers specific to object detection topologies (ROIPooling, ProirBox, DetectionOutput) -* Custom layers - -Topology analysis is performed during the process of loading a network into plugin, and if the topology is not supported, an exception is generated. \ No newline at end of file diff --git a/docs/OV_Runtime_UG/Extensibility_DG/AddingNGraphOps.md b/docs/OV_Runtime_UG/Extensibility_DG/AddingNGraphOps.md deleted file mode 100644 index 79ab802a5a1..00000000000 --- a/docs/OV_Runtime_UG/Extensibility_DG/AddingNGraphOps.md +++ /dev/null @@ -1,82 +0,0 @@ -# Custom nGraph Operations {#openvino_docs_IE_DG_Extensibility_DG_AddingNGraphOps} - -Inference Engine Extension API allows you to register operation sets (opsets) with custom nGraph operations to support models with operations which OpenVINO™ does not support out-of-the-box. - -Besides creating custom nGraph operations, to [support custom operations](../../HOWTO/Custom_Layers_Guide.md) in your model you must also create a Model Optimizer extension for the custom operations and an Inference Engine device plugin extension for the device you will use for inference. - -## Operation Class - -To add your custom nGraph operation, create a new class that extends `ngraph::Op`, which is in turn derived from `ngraph::Node`, the base class for all graph operations in nGraph. Follow the steps below to add a custom nGraph operation: - -1. 
Add the `NGRAPH_RTTI_DECLARATION` and `NGRAPH_RTTI_DEFINITION` macros which define a `NodeTypeInfo` object that identifies the type of the operation to the graph users and helps with dynamic type resolution. The type info of an nGraph operation currently consists of a string identifier and a version number, but this may change in the future. - -2. Implement constructors that optionally take the operation inputs and attributes as parameters. - -3. Override the shape inference method `validate_and_infer_types`. This method is called multiple times during graph manipulations to determine the shapes and element types of the operations outputs. To access the input shapes and input element types, use the `get_input_partial_shape()` and `get_input_element_type()` methods of `ngraph::Node`. Set the inferred shape and element type of the output using `set_output_type`. - -4. Override the `clone_with_new_inputs` method, which enables graph manipulation routines to create copies of this operation and connect it to different nodes during optimization. - -5. Override the `visit_attributes` method, which enables serialization and deserialization of operation attributes. An `AttributeVisitor` is passed to the method, and the implementation is expected to walk over all the attributes in the op using the type-aware `on_attribute` helper. Helpers are already implemented for standard C++ types like `int64_t`, `float`, `bool`, `vector`, and for existing nGraph defined types. - -6. Override `evaluate`, which is an optional method that enables the application of constant folding if there is a custom operation on the constant branch. If your operation contains `evaluate` method you also need to override the `has_evaluate` method, this method allow to get information about availability of `evaluate` method for the operation. 
- -Based on that, declaration of an operation class can look as follows: - -@snippet template_extension/old/op.hpp op:header - -### Class Fields - -The provided implementation has several fields: - - * `add` of type `int64_t` is an attribute of a custom operation - * `type_info` of type `ngraph::NodeTypeInfo` defines type and version of an operation - -### Operation Constructors - -nGraph operation contains two constructors: -* Default constructor, which enables you to create an operation without attributes -* Constructor that creates and validates an operation with specified inputs and attributes - -@snippet template_extension/old/op.cpp op:ctor - -### `validate_and_infer_types()` - -`ngraph::Node::validate_and_infer_types` method validates operation attributes and calculates output shapes using attributes of the operation. - -@snippet template_extension/old/op.cpp op:validate - -### `clone_with_new_inputs()` - -`ngraph::Node::clone_with_new_inputs` method creates a copy of the nGraph operation with new inputs. - -@snippet template_extension/old/op.cpp op:copy - -### `visit_attributes()` - -`ngraph::Node::visit_attributes` method enables you to visit all operation attributes. - -@snippet template_extension/old/op.cpp op:visit_attributes - -### `evaluate()` and `has_evaluate()` - -`ngraph::Node::evaluate` method enables you to apply constant folding to an operation. - -@snippet template_extension/old/op.cpp op:evaluate - -## Register Custom Operations in Extension Class - -To add custom operations to the [Extension](Extension.md) class, create an operation set with custom operations and implement the `InferenceEngine::IExtension::getOpSets` method: - -@snippet template_extension/old/extension.cpp extension:getOpSets - -This method returns a map of opsets that exist in the [extension library](Extension.md). -nGraph provides an opset mechanism to group operations into clusters. Different opsets distinguish between different versions of one operation. 
- -When specifying opset names, follow the rules below: -* Use unique opset names. -* Do not use the following built-in opset names: `extension`, `experimental`, `opset1`, `opset2`, `opset3`, ... , `opsetN`. -* [Make sure that the Model Optimizer](../../HOWTO/Custom_Layers_Guide.md) and your extension use the same opset names. -* IR v10 operations have the mandatory `version` attribute specifying the opset. -Operations from the default opset cannot be redefined. - -Use a custom opset to create a new operation or extend functionality of an existing operation from another opset. diff --git a/docs/OV_Runtime_UG/Extensibility_DG/Building.md b/docs/OV_Runtime_UG/Extensibility_DG/Building.md deleted file mode 100644 index b4cf5a1f84d..00000000000 --- a/docs/OV_Runtime_UG/Extensibility_DG/Building.md +++ /dev/null @@ -1,19 +0,0 @@ -# Build Extension Library Using CMake* {#openvino_docs_IE_DG_Extensibility_DG_Building} - -Inference Engine build infrastructure provides the Inference Engine Package for application development. - -To configure the build of your extension library, use the following CMake script: - -@snippet template_extension/old/CMakeLists.txt cmake:extension - -This CMake script finds the Inference Engine and nGraph using the `find_package` CMake command. - -To build the extension library, run the commands below: - -```sh -$ cd template_extension/old -$ mkdir build -$ cd build -$ cmake -DOpenVINO_DIR=[OpenVINO_DIR] ../ -$ cmake --build . 
-``` diff --git a/docs/OV_Runtime_UG/Extensibility_DG/CPU_Kernel.md b/docs/OV_Runtime_UG/Extensibility_DG/CPU_Kernel.md deleted file mode 100644 index 223a1401600..00000000000 --- a/docs/OV_Runtime_UG/Extensibility_DG/CPU_Kernel.md +++ /dev/null @@ -1,71 +0,0 @@ -# CPU Kernel Custom Operations {#openvino_docs_IE_DG_Extensibility_DG_CPU_Kernel} - -To enable operations not supported by OpenVINO™ out of the box, you need a custom extension for Model Optimizer, a custom nGraph operation set, and a custom kernel for the device you will target. This page describes custom kernel support for the CPU device. - -The primary means of the performance of the CPU codepath in the Inference Engine is the Intel® Math Kernel Library for Deep Neural Networks (Intel® MKL-DNN), and new CPU kernels extend the Inference Engine plugin for the Intel MKL-DNN. Implementing the InferenceEngine::ILayerExecImpl API call defines a general CPU-side extension. There are no Intel MKL-DNN specifics in the way you need to implement a kernel. - -## Implementation Class - -All custom kernels for the CPU plugin should be inherited from the InferenceEngine::ILayerExecImpl interface. -Based on that, declaration of a kernel implementation class can look as follows: - -@snippet template_extension/old/cpu_kernel.hpp cpu_implementation:header - -### Class Fields - -The provided implementation has several fields: - - * `add` of the type `int64_t` is an attribute of a custom operation. - * `inShape` of the type `ngraph::Shape` is an input shape. - * `outShape` of the type `ngraph::Shape` is an output shape. - * `error` of the type `std::string` is a field to handle errors from a constructor. - -### Constructor of Implementation - -An implementation constructor checks parameters of an nGraph operation, stores required attributes, and stores an error message in case of an error. 
- -@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:ctor - -### `getSupportedConfigurations` - -The InferenceEngine::ILayerExecImpl::getSupportedConfigurations method returns all supported configuration formats (input/output tensor layouts) for your implementation. To specify formats of data, use InferenceEngine::TensorDesc. - -@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:getSupportedConfigurations - -### `init` - -The InferenceEngine::ILayerExecImpl::init method gets a runtime-selected configuration from a vector that is populated from the `getSupportedConfigurations` method and checks the parameters: - -@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:init - -### `execute` - -The InferenceEngine::ILayerExecImpl::execute method accepts and processes the actual tensors as input/output blobs: - -@snippet template_extension/old/cpu_kernel.cpp cpu_implementation:execute - -## Register Implementation in `Extension` Class - -To register custom kernel implementation in the [Extension](Extension.md) class, implement the following methods: - -* getImplTypes -* getImplementation - -### getImplTypes - -InferenceEngine::IExtension::getImplTypes returns a vector of implementation types for an operation. - -@snippet template_extension/old/extension.cpp extension:getImplTypes - -### getImplementation - -InferenceEngine::IExtension::getImplementation returns the kernel implementation with a specified type for an operation. 
- -@snippet template_extension/old/extension.cpp extension:getImplementation - - -## Load Extension with Executable Kernels to Plugin - -Use the `AddExtension` method of the general plugin interface to load your primitives: - -@snippet snippets/CPU_Kernel.cpp part0 diff --git a/docs/OV_Runtime_UG/Extensibility_DG/Custom_ONNX_Ops.md b/docs/OV_Runtime_UG/Extensibility_DG/Custom_ONNX_Ops.md deleted file mode 100644 index 772bfb9da90..00000000000 --- a/docs/OV_Runtime_UG/Extensibility_DG/Custom_ONNX_Ops.md +++ /dev/null @@ -1,78 +0,0 @@ -# Custom ONNX* Operators {#openvino_docs_IE_DG_Extensibility_DG_Custom_ONNX_Ops} - -The ONNX\* importer provides a mechanism to register custom ONNX operators based on predefined or custom nGraph operations. -The function responsible for registering a new operator is called `ngraph::onnx_import::register_operator` and defined in the `onnx_import/onnx_utils.hpp` file. - -## Register Custom ONNX Operator Based on Predefined nGraph Operations - -The steps below explain how to register a custom ONNX operator, for example, CustomRelu, in a domain called `com.example`. -CustomRelu is defined as follows: -``` -x >= 0 => f(x) = x * alpha -x < 0 => f(x) = x * beta -``` -where `alpha` and `beta` are float constants. - -1. Include headers: - -@snippet onnx_custom_op/onnx_custom_op.cpp onnx_custom_op:headers - -2. Register the CustomRelu operator in the ONNX importer: - -@snippet onnx_custom_op/onnx_custom_op.cpp onnx_custom_op:register_operator - -The `register_operator` function takes four arguments: op_type, opset version, domain, and a function object. -The function object is a user-defined function that takes `ngraph::onnx_import::Node` as an input and based on that, returns a graph with nGraph operations. -The `ngraph::onnx_import::Node` class represents a node in an ONNX model. It provides functions to fetch input node(s) using `get_ng_inputs`, attribute value using `get_attribute_value`, and many more. 
See the `onnx_import/core/node.hpp` file for the full class declaration. - -New operator registration must happen before an ONNX model is read. For example, if an model uses the `CustomRelu` operator, call `register_operator("CustomRelu", ...)` before InferenceEngine::Core::ReadNetwork. -Reregistering ONNX operators within the same process is supported. If you register an existing operator, you get a warning. - -The example below demonstrates an exemplary model that requires a previously created `CustomRelu` operator: -``` -@include onnx_custom_op/custom_relu_model.prototxt -``` - -This model is in text format, so before it can be passed to Inference Engine, it has to be converted to binary using: -```py -from google.protobuf import text_format -import onnx - -with open("custom_relu_model.prototxt") as in_file: - proto = onnx.ModelProto() - text_format.Parse(in_file.read(), proto, allow_field_number=True) - s = onnx._serialize(proto) - onnx._save_bytes(s, "custom_relu_model.onnx") -``` - - -To create a graph with nGraph operations, visit [Custom nGraph Operations](AddingNGraphOps.md). -For a complete list of predefined nGraph operators, visit [Available Operations Sets](../../ops/opset.md). - -If you do not need an operator anymore, unregister it by calling `unregister_operator`. The function takes three arguments: `op_type`, `version`, and `domain`. - -@snippet onnx_custom_op/onnx_custom_op.cpp onnx_custom_op:unregister_operator - -## Register Custom ONNX Operator Based on Custom nGraph Operations - -The same principles apply when registering a custom ONNX operator based on custom nGraph operations. -This example shows how to register a custom ONNX operator based on `Operation` presented in [this tutorial](AddingNGraphOps.md), which is used in [TemplateExtension](Extension.md): - -@snippet template_extension/old/extension.cpp extension:ctor - -Here, the `register_operator` function is called in the constructor of Extension. 
The constructor makes sure that the function is called before InferenceEngine::Core::ReadNetwork, because InferenceEngine::Core::AddExtension must be called before a model with a custom operator is read. - -The example below demonstrates how to unregister an operator from the destructor of Extension: -@snippet template_extension/old/extension.cpp extension:dtor - -> **REQUIRED**: It is mandatory to unregister a custom ONNX operator if it is defined in a dynamic shared library. - -## Requirements for Building with CMake - -A program that uses the `register_operator` functionality requires `openvino::core` and `openvino::frontend::onnx` libraries in addition to the OpenVINO Inference Runtime. -The `openvino::frontend::onnx` is a component of the `OpenVINO` package , so `find_package(OpenVINO REQUIRED COMPONENTS ONNX)` can find both. -Those libraries need to be passed to the `target_link_libraries` command in the CMakeLists.txt file. - -See CMakeLists.txt below for reference: - -@snippet onnx_custom_op/CMakeLists.txt cmake:onnx_custom_op diff --git a/docs/OV_Runtime_UG/Extensibility_DG/Extension.md b/docs/OV_Runtime_UG/Extensibility_DG/Extension.md deleted file mode 100644 index 1925ce9e25e..00000000000 --- a/docs/OV_Runtime_UG/Extensibility_DG/Extension.md +++ /dev/null @@ -1,29 +0,0 @@ -# Extension Library {#openvino_docs_IE_DG_Extensibility_DG_Extension} - -Inference Engine provides an InferenceEngine::IExtension interface, which defines the interface for Inference Engine Extension libraries. -Inherit all extension libraries from this interface. The example below contains an implementation of two operations: `Template` -used as an example in this document and `FFT` used as a more complex example from the [Custom Operations Guide](../../HOWTO/Custom_Layers_Guide.md). - -> **NOTE**: `FFT` operation is implemented using the OpenCV library functions `cv::dft` and `cv::idft`. 
- -Based on that, the declaration of an extension class can look as follows: - -@snippet template_extension/old/extension.hpp extension:header - -The extension library should use `IE_DEFINE_EXTENSION_CREATE_FUNCTION` macro to export a function, which creates an `Extension` class: - -@snippet template_extension/old/extension.cpp extension:CreateExtension - -Also, an `Extension` object should implement the following methods: - -* InferenceEngine::IExtension::Release deletes an extension object. - -* InferenceEngine::IExtension::GetVersion returns information about the version of the library. - -@snippet template_extension/old/extension.cpp extension:GetVersion - -Implement the InferenceEngine::IExtension::getOpSets method if the extension contains custom layers. -Read [Custom nGraph Operation](AddingNGraphOps.md) for more information. - -To integrate execution kernels to the extension library, read [How to Implement Custom CPU Operations](CPU_Kernel.md). -To register a custom ONNX\* operator to the extension library, read [Custom ONNX Operators](Custom_ONNX_Ops.md). diff --git a/docs/OV_Runtime_UG/Extensibility_DG/Intro.md b/docs/OV_Runtime_UG/Extensibility_DG/Intro.md deleted file mode 100644 index ca3217a26ce..00000000000 --- a/docs/OV_Runtime_UG/Extensibility_DG/Intro.md +++ /dev/null @@ -1,60 +0,0 @@ -# Inference Engine Extensibility Mechanism {#openvino_docs_IE_DG_Extensibility_DG_Intro} - -@sphinxdirective - -.. 
toctree:: - :maxdepth: 1 - :hidden: - - openvino_docs_IE_DG_Extensibility_DG_AddingNGraphOps - openvino_docs_IE_DG_Extensibility_DG_Custom_ONNX_Ops - CPU Kernels Extensibility - GPU Kernels Extensibility - VPU Kernels Extensibility - openvino_docs_IE_DG_Extensibility_DG_Extension - openvino_docs_IE_DG_Extensibility_DG_Building - -@endsphinxdirective - -If your model contains operations not normally supported by OpenVINO, the Inference Engine Extensibility API lets you add support for those custom operations in a library containing custom nGraph operation sets, corresponding extensions to the Model Optimizer, and a device plugin extension. See the overview in the [Custom Operations Guide](../../HOWTO/Custom_Layers_Guide.md) to learn how these work together. - -To load the Extensibility library to the `InferenceEngine::Core` object, use the `InferenceEngine::Core::AddExtension` method. - -## Inference Engine Extension Library - -An Inference Engine Extension dynamic library contains the following components: - - * [Extension Library](Extension.md): - - Contains custom operation sets - - Provides CPU implementations for custom operations - * [Custom nGraph Operation](AddingNGraphOps.md): - - Enables the use of `InferenceEngine::Core::ReadNetwork` to read Intermediate Representation (IR) with unsupported - operations - - Enables the creation of `ngraph::Function` with unsupported operations - - Provides a shape inference mechanism for custom operations - -> **NOTE**: This documentation is written based on the [Template extension](https://github.com/openvinotoolkit/openvino/tree/master/docs/template_extension), which demonstrates extension development details. You can review the complete code, which is fully compilable and up-to-date, to see how it works. - -## Execution Kernels - -The Inference Engine workflow involves the creation of custom kernels and either custom or existing operations. 
- -An _operation_ is a network building block implemented in the training framework, for example, `Convolution` in Caffe*. -A _kernel_ is defined as the corresponding implementation in the Inference Engine. - -Refer to the [Model Optimizer Extensibility](../../MO_DG/prepare_model/customize_model_optimizer/Customize_Model_Optimizer.md) -for details on how a mapping between framework operations and Inference Engine kernels is registered. - -In short, you can plug your own kernel implementations into the Inference Engine and map them to the operations in the original framework. - -The following pages describe how to integrate custom _kernels_ into the Inference Engine: - - * [Introduction to development of custom CPU kernels](CPU_Kernel.md) - * [Introduction to development of custom GPU kernels](GPU_Kernel.md) - * [Introduction to development of custom VPU kernels](VPU_Kernel.md) - -## See Also - -* [Build an extension library using CMake*](Building.md) -* [Using Inference Engine Samples](../Samples_Overview.md) -* [Hello Shape Infer SSD sample](../../../samples/cpp/hello_reshape_ssd/README.md) diff --git a/docs/OV_Runtime_UG/Extensibility_DG/VPU_Kernel.md b/docs/OV_Runtime_UG/Extensibility_DG/VPU_Kernel.md deleted file mode 100644 index 86b2ad092bc..00000000000 --- a/docs/OV_Runtime_UG/Extensibility_DG/VPU_Kernel.md +++ /dev/null @@ -1,682 +0,0 @@ -# How to Implement Custom Layers for VPU (Intel® Neural Compute Stick 2) {#openvino_docs_IE_DG_Extensibility_DG_VPU_Kernel} - -To enable operations not supported by OpenVINO™ out of the box, you need a custom extension for Model Optimizer, a custom nGraph operation set, and a custom kernel for the device you will target. This page describes custom kernel support for one the VPU, the Intel® Neural Compute Stick 2 device, which uses the MYRIAD device plugin. - -> **NOTES:** -> * OpenCL\* custom layer support is available in the preview mode. -> * This section assumes you are familiar with developing kernels using OpenCL. 
- -To customize your topology with an OpenCL layer, carry out the tasks described on this page: - -1. Write and compile your OpenCL code with the standalone offline OpenCL compiler (`clc`). -2. Write a configuration file to bind the OpenCL kernel to the topology file (`.xml`) of the model IR. -3. Pass the configuration file to the Inference Engine with the model IR. - -## Compile OpenCL code for VPU (Intel® Neural Compute Stick 2) - -> **NOTE**: OpenCL compiler, targeting Intel® Neural Compute Stick 2 for the SHAVE* processor only, is redistributed with OpenVINO. -OpenCL support is provided by ComputeAorta* and is distributed under a license agreement between Intel® and Codeplay* Software Ltd. - -The OpenCL toolchain for the Intel® Neural Compute Stick 2 supports offline compilation only, so first compile OpenCL C code using the standalone `clc` compiler. You can find the compiler binary at `/tools/cl_compiler`. - -> **NOTE**: By design, custom OpenCL layers support any OpenCL kernels written assuming OpenCL version 1.2. It also supports half float extension and is optimized for this type, because it is a native type for Intel® Movidius™ VPUs. - -1. Prior to running a compilation, make sure that the following variables are set: - * `SHAVE_MA2X8XLIBS_DIR=/tools/cl_compiler/lib/` - * `SHAVE_LDSCRIPT_DIR=/tools/cl_compiler/ldscripts/` - * `SHAVE_MYRIAD_LD_DIR=/tools/cl_compiler/bin/` - * `SHAVE_MOVIASM_DIR=/tools/cl_compiler/bin/` -2. Run the compilation with the command below. You should use `--strip-binary-header` to make an OpenCL runtime-agnostic binary runnable with the Inference Engine. - ```bash - cd /tools/cl_compiler/bin - ./clc --strip-binary-header custom_layer.cl -o custom_layer.bin - ``` - -## Write a Configuration File - -To tie the topology IR for a layer you customize, prepare a configuration file, so that the Inference Engine can find parameters for your kernel and the execution work grid is described. 
-For example, consider the following OpenCL kernel signature: -```cpp -__kernel void reorg_nhwc(__global const half *src, __global half *out, int w, int h, int c, int stride); -``` -A configuration file for this kernel might be the following: -```xml - - - - - - - - - - - - - - -``` -Each custom layer is described with the `CustomLayer` node. It has the following nodes and attributes: - - Root node `CustomLayer` contains the following attributes: - - `name` – (Required) The name of the Inference Engine layer to bind the kernel with. - - `type` and `version` – (Required) Reserved for future use. Set them to `MVCL` and `1` respectively. - - `max-shaves` – (Optional) The maximum number of SHAVE cores that should be dedicated for the layer. It is useful for debugging concurrency issues or for resource saving that memory bound kernel does not scale well with the number of cores, so more resources can be left for the rest of a topology. - - Sub-node `Kernel` must contain the following attributes: - - `entry` – The name of your kernel function as you defined it in a source file. In the example above, it is `reorg_nhwc`. - - Node `Source` must contain the following attributes: - - `filename` – The path to a compiled binary relative to the XML configuration file. - - Sub-node `Parameters` – Describes parameters bindings. For more information, see the description below. - - Sub-node `WorkSizes` – Describes local and global work group sizes and the source for dimension deduction as a pair `direction,port`. In the example above, the work group is described relatively to the dimension of the input tensor that comes through port 0 in the IR. `global` and `local` work group configurations support any simple math expressions with +,-,\*,/, and () from `B`(batch), `Y`(height), `X`(width) and `F`(channels). - - Sub-node `Where` – Allows to customize bindings with the `key="value"` attribute. For example, to substitute only 3x3 convolutions, write `` in the binding xml. 
- - Parameter description supports `Tensor` of one of tensor types such as `input`, `output`, `input_buffer`, `output_buffer` or `data`, `Scalar`, or `Data` nodes and has the following format: - - Each `Tensor` node of `input` or `output` type must contain the following attributes: - - `arg-name` – The name of a kernel parameter in the kernel signature. - - `type` – Node type: `input` or `output` as specified in the IR. - - `port-index` – A number of input/output ports as specified in the IR. - - `format` – The channel order in the tensor. Optional conversion layers are generated if the custom layer format is not compatible with formats of neighboring layers. `BFXY`, `BYXF`, and `ANY` formats are supported currently. - - Each `Tensor` node of `input_buffer` or `output_buffer` type must contain the following attributes: - - `arg-name` – The name of a kernel parameter in the kernel signature. - - `type` – Node type: `input_buffer` or `output_buffer`. Use the appropriate type to bind multiple kernels that correspond to different stages of the same layer. - - `port-index` – The unique identifier to bind by. - - `dim` – The dim source with the same `direction,port` format used for `WorkSizes` bindings. - - `size` – Amount of bytes needed. Current expression syntax supports only expression over dimensions of over selected input/output tensor or constants and might be expended in the future. - - Here is an example of multi-stage MVN layer binding: - ```xml - - - - - - - - - - - - - - - - - - - - - - - - - - ``` - - Each `Tensor` node that has the type `data` must contain the following attributes: - - `source` – A name of the blob as it is in the IR. Typical example is `weights` for convolution. - - `format` – Specifies the channel order in the tensor. Optional conversion layers are generated if the custom layer format is not. 
- ```xml - - - - - - - - - - - - - ``` - - Each `Scalar` node must contain the following attributes: - - `arg-name` – The name of a kernel parameter in the kernel signature. - - `type` – `int` or `float` value. It is used for correct argument extraction from IR parameters. - - `source` – Contains the name of the parameter in the IR file or input/output (`I`/`O`, `In`/`On`, where `n` is a port number) - followed by dimension `B`(batch), `Y`(height), `X`(width), or `F`(channels). - - - Each `Data` node must contain the following attributes: - - `arg-name` – The name of a kernel parameter in the kernel signature. - - `type` – Node type. Currently, `local_data` is the only supported value, which defines buffer allocated in fast local on-chip memory. It is limited to 100KB for all `__local` and - `__private` arrays defined inside the kernel as well as all `__local` parameters passed to the kernel. Note that a manual-DMA extension requires double buffering. - If the custom layer is detected to run out of local memory, the inference fails. - - `dim` – The dim source with the same `direction,port` format used for `WorkSizes` bindings. - - `size` – Amount of bytes needed. The current expression syntax supports only expression over dimensions of over selected input/output tensor or constants and may be extended in the future. - The example binding below illustrates a kernel with two local buffers passed to the kernel. - ```xml - - - - - - - - - - - - - - -``` - -## Pass Configuration File to Inference Runtime - -> **NOTE**: If both native and custom layer implementations are present, the custom kernel has a priority over the native one. 
- -Before loading the network that features the custom layers, provide a separate configuration file and load it using the InferenceEngine::Core::SetConfig() method with the PluginConfigParams::KEY_CONFIG_FILE key and the configuration file name as a value: -```cpp -InferenceEngine::Core core; -// Load custom layers -core.SetConfig({ { InferenceEngine::PluginConfigParams::KEY_CONFIG_FILE, "" } }, "MYRIAD"); -``` -Optionally, set a path to a custom layers description with a pair of `VPU_CUSTOM_LAYERS` and `/path/to/your/customLayers.xml` -as a network configuration: -```cpp -InferenceEngine::Core core; -std::map networkConfig; -config["VPU_CUSTOM_LAYERS"] = "/path/to/your/customLayers.xml"; -// Load custom layers in network config -auto exeNetwork = core.LoadNetwork(cnnNetwork, "MYRIAD", networkConfig); -``` - -## Optimizing Kernels with OpenCL for VPU (Intel® Neural Compute Stick 2) - -This section provides optimization guidelines on writing custom layers with OpenCL for VPU devices. Knowledge about general OpenCL -programming model and OpenCL kernel language is assumed and not a subject of this section. The OpenCL model mapping to VPU is described in the table below. - -| OpenCL Model | VPU Mapping| -|-----|----| -| Device code | Executed on SHAVE cores | -| Private memory | Mapped to CMX internal memory, limited to 100KB per work group, valid only while the work group is executed | -| Local memory | Mapped to CMX internal memory, limited to 100KB per work group, valid only while the work group is executed | -| Global memory | Mapped to DDR, used to pass execution preserved parameters for inputs, outputs, and blobs | -| Work group | Executed on a single SHAVE core iterating over multiple work items | - -Note that by the OpenCL specification, the work group execution order is not specified. This means that it is your -responsibility to ensure that race conditions among work groups are not introduced. 
Custom layer runtime spits evenly -work grid among available compute resources and executes them in an arbitrary order. This static scheduling approach works best if the load is evenly spread out across work groups, which is a typical case for Deep Learning kernels. The following guidelines are recommended to use for work group partitioning: - -1. Split work evenly across work groups. -2. Adjust work group granularity to maintain equal workload for all compute codes. -3. Set the maximum number of cores using the `max-shaves` attribute for the `CustomLayer` node. This keeps more resources for the rest of topology. It is also useful if the kernel scalability reached its limits, which may happen while optimizing memory bound kernels or kernels with poor parallelization. -4. Try an alternate data layout (`BFXY`/`BYXF`) for the kernel if it improves work group partitioning or data access patterns. -Consider not just specific layer boost, but full topology performance because data conversion layers would be automatically inserted -as appropriate. - -Offline OpenCL compiler (`clc`) features automatic vectorization over `get_global_id(0)` usage, if uniform access is detected. -For example, the kernel below could be automatically vectorized: -```cpp -__kernel void cvtf32f16(__global float* restrict inImage, __global half* restrict outImage, - float scale, float bais) -{ - int idx = get_global_id(0) + get_global_id(1) * get_global_size(0) + get_global_id(2) * get_global_size(0) * get_global_size(1); - outImage[idx] = convert_half(inImage[idx]*scale+bais); -} -``` -However, this work-group based vectorizer (WGV) conflicts with the default LLVM vectorizer based on superword level parallelism -(SLP) for the current compiler version. Manual vectorization is recommended to provide the best performance for non-uniform code -patterns. WGV works if and only if vector types are not used in the code. - -Here is a short list of optimization tips: - -1. 
Help auto-vectorizer ensure non-aliasing pointers for kernel parameters by putting `restrict` where possible. - - This can give a performance boost, especially for kernels with unrolling, like `ocl_grn` from the example below. - - Place `restrict` markers for kernels with manually vectorized codes. In the `ocl_grn` kernel below, the unrolled version without `restrict` is up to 20% slower than the most optimal one, which combines unrolling and `restrict`. -2. Put `#‍pragma unroll N` to your loop header. The compiler does not trigger unrolling by default, so it is your responsibility to -annotate the code with pragmas as appropriate. The `ocl_grn` version with `#‍pragma unroll 4` is up to 50% faster, most of which comes from unrolling the first loop, because LLVM, in general, is better in scheduling 3-stage loops (load-compute-store), while the fist loop - `variance += (float)(src_data[c*H*W + y*W + x] * src_data[c*H*W + y*W + x]);` is only 2-stage (load-compute). Pay -attention to unrolling such cases first. Unrolling factor is loop-dependent. Choose the smallest number that -still improves performance as an optimum between the kernel size and execution speed. For this specific kernel, changing the unroll factor from `4` to `6` results in the same performance, so unrolling factor equal to 4 is an optimum. 
For Intel® Neural Compute Stick 2, unrolling is conjugated with the automatic software pipelining for load, store, and compute stages: -```cpp -__kernel void ocl_grn(__global const half* restrict src_data, __global half* restrict dst_data, int C, float bias) -{ - int x = get_global_id(0); - int W = get_global_size(0); - int y = get_global_id(1); - int H = get_global_size(1); - - float variance = bias + 1e-9f; - - #pragma unroll 4 - for (int c = 0; c < C; c++) - variance += (float)(src_data[c*H*W + y*W + x] * src_data[c*H*W + y*W + x]); - - variance = 1.f / native_sqrt(variance); - - #pragma unroll 4 - for (int c = 0; c < C; c++) - dst_data[c*H*W + y*W + x] = (half)((float)src_data[c*H*W + y*W + x] * variance); -} -``` -To check the efficiency of WGV, you can compare performance of the kernel above with the kernel below, which is manually vectorized over width: -```cpp -__kernel void ocl_grn_line(__global const half* restrict src_data, __global half* restrict dst_data, int C, int W, float bias) -{ - int y = get_global_id(1); - int H = get_global_size(1); - - for (int x = 0; x < W/8; x++) - { - float8 variance = (float8)(bias+1e-9f); - - #pragma unroll 4 - for (int c = 0; c < C; c++) - { - __global const half8* restrict src_line = ((__global const half8 * restrict)(src_data + c*H*W + y*W)); - half8 sh = src_line[x]; - variance += convert_float8(sh*sh); - } - - variance = 1.f/native_sqrt(variance); - - #pragma unroll 4 - for (int c = 0; c < C; c++) - { - __global const half8* restrict src_line = ((__global const half8 * restrict)(src_data + c*H*W + y*W)); - __global half8* restrict dst_line = ((__global half8 * restrict)(dst_data + c*H*W + y*W)); - - dst_line[x] = convert_half8(convert_float8(src_line[x])*variance); - } - } - for (int x = W/8*8; x < W; x++) - { - float variance = bias+1e-9f; - #pragma unroll 4 - for (int c = 0; c < C; c++) - variance += (float)(src_data[c*H*W + y*W + x]*src_data[c*H*W + y*W + x]); - - variance = 1.f/native_sqrt(variance); - - #pragma 
unroll 4 - for (int c = 0; c < C; c++) - dst_data[c*H*W + y*W + x] = (float)src_data[c*H*W + y*W + x]*variance; - } -} -``` -Both versions perform the same, but the second one has more complex code. - -3. If it is easy to predict the work group size, you can also use the `reqd_work_group_size` kernel attribute to ask the compiler -to unroll the code up to the local size of the work group. Note that if the kernel is actually executed with the -different work group configuration, the result is undefined. - -4. Prefer to use the `half` compute if it keeps reasonable accuracy. 16-bit float is a native type for Intel® Neural Compute Stick 2, most of the functions `half_*` are mapped to a single hardware instruction. -Use the standard `native_*` function for the rest of types. - -5. Prefer to use the `convert_half` function over `vstore_half` if conversion to 32-bit float is required. `convert_half` is mapped to a single hardware instruction. For the `cvtf32f16` kernel above, the line `outImage[idx] = convert_half(inImage[idx]*scale+bais);` is eight times slower than the code with `vstore_half`. - -6. Mind early exits. Early exit can be extremely costly for the current version of the `clc` compiler due to conflicts with the -auto-vectorizer. The generic advice would be to setup local size by `x` dimension equal to inputs or/and outputs width. -If it is impossible to define the work grid that exactly matches inputs or/and outputs to eliminate checks, for example, -`if (get_global_id(0) >= width) return`, use line-wise kernel variant with manual vectorization. -The kernel example below demonstrates the impact of early exits on kernel performance. 
- ```cpp - // Initial version - __kernel void reorg(const __global half* restrict src, __global half* restrict out, int stride) - { - int w = get_global_id(0); - int W = get_global_size(0); - - int h = get_global_id(1); - int H = get_global_size(1); - - int c = get_global_id(2); - int C = get_global_size(2); - - int C2 = C/(stride*stride); - int offset = c / C2; - int c2 = c - C2 * offset; - - int H2 = H*stride; - int W2 = W*stride; - - int h2 = h*stride + offset / stride; - int w2 = w*stride + offset - stride * (offset / stride); - - out[W*H*c + W*h + w] = src[W2*H2*c2 + W2*h2 + w2]; - } - ``` -This `reorg` kernel is auto-vectorizable, but an input for YOLO v2 topology is `NCHW=<1,64,26,26>` and it is not multiple of vector width, which is `8` for `half` data type. As a result, the Inference Engine does not select the auto-vectorized kernel. -To compare performance of auto-vectorized and scalar version of the kernel, change the input size to`NCHW=<1,64,26,32>`. This enables the auto-vectorized version to be selected by the Inference Engine and can give you about 30% uplift. -Since the auto-vectorized version is faster, it makes sense to enable it for the YOLO v2 topology input size by setting the local size multiple of vector, for example, 32, and adjust global sizes accordingly. As a result, the execution work grid exceeds actual input dimension, so out-of-bound checks should be inserted. 
See the updated kernel version below: - ```cpp - // Version with out-of-bound checks added - __kernel void reorg(const __global half* restrict src, __global half* restrict out, int W, int stride) - { - int w = get_global_id(0); - w = min(w, W-1); - - int h = get_global_id(1); - int H = get_global_size(1); - - int c = get_global_id(2); - int C = get_global_size(2); - - int C2 = C/(stride*stride); - int offset = c / C2; - int c2 = c - C2 * offset; - - int H2 = H*stride; - int W2 = W*stride; - - int h2 = h*stride + offset / stride; - int w2 = w*stride + offset - stride * (offset / stride); - - out[W*H*c + W*h + w] = src[W2*H2*c2 + W2*h2 + w2]; - } - ``` -This code performs the same as the initial kernel above (scalar) due to branching overhead. If you replace min/max expression `w = min(w, W-1);` with `if (w >= W) return;`, runtime increases up to 2x against to code without branching (initial version).
-If branching is inevitable for your element-based kernel, it is recommended to change the scheme to line-based. See the kernel variant below: -```cpp -// Line-wise version -__kernel void reorg(const __global half* restrict src, __global half* restrict out, int H, int W, int stride) -{ - int h = min((int)get_global_id(0), H-1); - - int c = get_global_id(1); - int C = get_global_size(1); - int C2 = C/(stride*stride); - int offset = c / C2; - int c2 = c - C2 * offset; - - int H2 = H*stride; - int W2 = W*stride; - - for (int w = 0; w < W; ++w) - { - int h2 = h*stride + offset / stride; - int w2 = w*stride + offset - stride * (offset / stride); - - out[W*H*c + W*h + w] = src[W2*H2*c2 + W2*h2 + w2]; - } -} -``` -This decreases the execution time up to 40% against the best performing vectorized kernel without early exits (initial version). -7. Reuse computations among work items by using line-based kernels or sharing values though `__local` memory. -8. Improve data access locality. Most of custom kernels are memory bound while convolution and fully connected layers are hardware-implemented. The code below demonstrates a further optimized version of the `reorg` kernel unrolled by `stride`: - ```cpp - // Unrolled line-wise version - __kernel void reorg_unrolled_by_stride(const __global half* restrict src, __global half* restrict dst, - int H, int W, int stride) - { - int h = min((int)get_global_id(0), H-1); - - int c2 = get_global_id(1); - int C2 = get_global_size(1); - int C = C2*stride*stride; - - int H2 = H*stride; - int W2 = W*stride; - - for (int stride_y = 0; stride_y < stride; stride_y++) - for (int stride_x = 0; stride_x < stride; stride_x++) - for (int w2 = 0, w = 0; w < W; w2 += stride, w++) - dst[W*H*C2*(stride_y*stride+stride_x) + W*H*c2 + W*h + w] = src[W2*H2*c2 + W2*h*stride + W2*stride_y + w2 + stride_x]; - } - ``` -`scr` data in this case loaded only once. As the result, the cycle count drops up to 45% against the line-wise version. - -9. 
Copy data from `__dlobal` to `__local` or `__private` memory if the data is accessed more than once. Access to -`__dlobal` memory is orders of magnitude slower than access to `__local`/`__private` due to statically scheduled pipeline, which -stalls completely on memory access without any prefetch. The same recommendation is applicable for scalar load/store -from/to a `__blobal` pointer since work-group copying could be done in a vector fashion. - -10. Use a manual DMA extension. Local (on-chip) memory throughput is up to 24x higher than DDR throughput. Starting from OpenVINO™ 2020.1, VPU OpenCL features manual-DMA kernel extension to copy sub-tensor used by work group into local memory and performing compute without DDR evolved. Here is the simple GRN kernel implementation that runs over DDR. Local size is in the form (width of the input tensor, 1, 1) to define a large enough work group to get code automatically vectorized and unrolled, while global size is (width of the input tensor, height of the input tensor, 1): - ```cpp - __kernel void grn_NCHW( - __global const half* restrict src_data, - __global half* restrict dst_data, - int C, - float bias) - { - float variance = bias + 1e-9f; - - #pragma unroll 4 - for (int c = 0; c < C; c++) - { - float val = (float) src_data[c*get_global_size(1)*get_global_size(0) + get_global_id(1)*get_global_size(0) + get_global_id(0)]; - variance += val*val; - } - - half hvariance = (half)(native_rsqrt((half)(variance/16.f))*0.25f); - - #pragma unroll 4 - for (int c = 0; c < C; c++) - { - dst_data[c*get_global_size(1)*get_global_size(0) + get_global_id(1)*get_global_size(0) + get_global_id(0)] - = src_data[c*get_global_size(1)*get_global_size(0) + get_global_id(1)*get_global_size(0) + get_global_id(0)] * hvariance; - } - } - ``` - -This kernel can be rewritten to introduce special data binding `__dma_preload` and `__dma_postwrite intrinsics`. 
This means that instead of one kernel, a group of three kernels should be implemented: `kernelName`, `__dma_preload_kernelName`, and `__dma_postwrite_kernelName`. `__dma_preload_kernelName` for a particular work group `n` is guaranteed to be executed before the `n`-th work group itself, while `__dma_postwrite_kernelName` is guaranteed to be executed after a corresponding work group. You can define one of those functions that are intended to be used to copy data from-to `__global` and `__local` memory. The syntactics requires exact functional signature match. The example below illustrates how to prepare your kernel for manual-DMA. - - ```cpp - __kernel void __dma_preload_grn_NCHW( - __global const half* restrict src, - __global half* restrict dst, - __local half* restrict local_src, - __local half* restrict local_dst, - int C, - float bias) - { - // ToDO: copy required piece of src tensor into local_src - } - - __kernel void __dma_postwrite_grn_NCHW( - __global const half* restrict src, - __global half* restrict dst, - __local const half* restrict local_src, - __local half* restrict local_dst, - int C, - float bias) - { - // ToDO: copy back computed piece of local_dst into dst - } - - __kernel void grn_NCHW( - __global const half* restrict src_data, - __global half* restrict dst_data, - __local half* restrict src, - __local half* restrict dst, - int C, - float bias) - { - // same as the example above - } - ``` -The GRN kernel operates on channel-major tensors to compute average over full channel range and then normalizes input elements to produce the output. -As a part of the manual DMA extension, a group of work group copy functions are introduced in addition to `async_work_group_copy`, which is also mapped to a DMA call. 
- -Here is the list of supported functions: -```cpp -// 2D sub-tensor copy -event_t WorkGroupDmaCreateStrideTransaction( - const local T *src, - global T *dst, - size_t src_width, // width of the line of source in bytes - size_t dst_width, // width of the line of destination in bytes - size_t src_stride, // stride between corresponding 2 consecutive lines of source in bytes - size_t dst_stride, // stride between corresponding 2 consecutive lines of destination in bytes - size_t size, // total number of bytes loaded for all lines from source to destination - event_t event) __OVERLOAD; - - -event_t WorkGroupDmaCreateStrideTransaction( - const global T *src, - local T *dst, - size_t src_width, // width of the line of source in bytes - size_t dst_width, // width of the line of destination in bytes - size_t src_stride, // stride between corresponding 2 consecutive lines of source in bytes - size_t dst_stride, // stride between corresponding 2 consecutive lines of destination in bytes - size_t size, // total number of bytes loaded for all lines from source to destination - event_t event) __OVERLOAD; - -// 3D sub-tensor copy -event_t WorkGroupDmaCreate3DTransaction( - const local T *src, - global T *dst, - size_t src_width, // width of the line of source in bytes - size_t dst_width, // width of the line of destination in bytes - size_t src_stride, // stride between corresponding 2 consecutive lines of source in bytes - size_t dst_stride, // stride between corresponding 2 consecutive lines of destination in bytes - size_t num_planes, // number of planes to be copied - size_t src_plane_stride, // stride between corresponding 2 consecutive planes of source in bytes - size_t dst_plane_stride, // stride between corresponding 2 consecutive planes of destination in bytes - size_t size, // size of the loaded plane in bytes, analogues to the size in 2D case - event_t event) __OVERLOAD; - -event_t WorkGroupDmaCreate3DTransaction( - const global T *src, - local T *dst, - size_t 
src_width, // width of the line of source in bytes - size_t dst_width, // width of the line of destination in bytes - size_t src_stride, // stride between corresponding 2 consecutive lines of source in bytes - size_t dst_stride, // stride between corresponding 2 consecutive lines of destination in bytes - size_t num_planes, // number of planes to be copied - size_t src_plane_stride, // stride between corresponding 2 consecutive planes of source in bytes - size_t dst_plane_stride, // stride between corresponding 2 consecutive planes of destination in bytes - size_t size, // size of the loaded plane in bytes, analogues to the size in 2D case - event_t event) __OVERLOAD; -``` -where `T` can be `uchar`, `char`, `short`, `ushort`, `int`, `uint`, `long`, `ulong`, `half` or `float`. - -Modified version of the GRN kernel could be the following: -```cpp -__kernel void __dma_preload_grn_NCHW( - __global const half* restrict src, - __global half* restrict dst, - __local half* restrict local_src, - __local half* restrict local_dst, - int C, - float bias) -{ - WorkGroupDmaCreate3DTransaction( - src + get_group_id(0)*get_local_size(0) - + get_group_id(1)*get_local_size(1)*get_global_size(0), // src - local_src, // dst - get_local_size(0) * sizeof(half), // src width - get_local_size(0) * sizeof(half), // dst width - get_global_size(0) * sizeof(half), // src stride - get_local_size(0) * sizeof(half), // dst stride - C, // num planes - get_global_size(0) * get_global_size(1) * sizeof(half), // src plane stride - get_local_size(0) * get_local_size(1) * sizeof(half), // dst plane stride - get_local_size(0) * get_local_size(1) * sizeof(half), // plane size - 0); -} - -__kernel void __dma_postwrite_grn_NCHW( - __global const half* restrict src, - __global half* restrict dst, - __local const half* restrict local_src, - __local half* restrict local_dst, - int C, - float bias) -{ - WorkGroupDmaCreate3DTransaction( - local_dst, // src - dst + get_group_id(0)*get_local_size(0) - + 
get_group_id(1)*get_local_size(1)*get_global_size(0), // dst - get_local_size(0) * sizeof(half), // src width - get_local_size(0) * sizeof(half), // dst width - get_local_size(0) * sizeof(half), // src stride - get_global_size(0) * sizeof(half), // dst stride - C, // num planes - get_local_size(0) * get_local_size(1) * sizeof(half), // src plane stride - get_global_size(0) * get_global_size(1) * sizeof(half), // dst plane stride - get_local_size(0) * get_local_size(1) * sizeof(half), // plane size - 0); -} - -__kernel void grn_NCHW( - __global const half* restrict src_data, - __global half* restrict dst_data, - __local half* restrict src, - __local half* restrict dst, - int C, - float bias) -{ - float variance = bias + 1e-9f; - - #pragma unroll 8 - for (int c = 0; c < C; c++) - { - float val = (float) src[c*get_local_size(1)*get_local_size(0) + get_local_id(1)*get_local_size(0) + get_local_id(0)]; - variance += val*val; - } - - half hvariance = (half)(native_rsqrt((half)(variance/16.f))*0.25f); - - #pragma unroll 8 - for (int c = 0; c < C; c++) - { - dst[c*get_local_size(1)*get_local_size(0) + get_local_id(1)*get_local_size(0) + get_local_id(0)] - = src[c*get_local_size(1)*get_local_size(0) + get_local_id(1)*get_local_size(0) + get_local_id(0)] * hvariance; - } -} -``` - -Note the `get_local_size` and `get_local_id` usage inside the kernel. 21x speedup is expected for a kernel on enet-curbs setup because it was completely limited by memory usage. - -An alternative method to using DMA is to use work item copy extension. Those functions are executed inside a kernel and requires work groups equal to single work item. 
- -Here is the list of supported work item functions: -```cpp -item_dma_event_t WorkItemDmaCreateTransaction( - const global T *src, - private T *dst, - size_t size, - item_dma_event_t event) __OVERLOAD; - -item_dma_event_t WorkItemDmaCreateTransaction( - const private T *src, - global T *dst, - size_t size, - item_dma_event_t event) __OVERLOAD; - -item_dma_event_t WorkItemDmaCreateStrideTransaction( - const global T *src, - private T *dst, - size_t src_width, - size_t dst_width, - size_t src_stride, - size_t dst_stride, - size_t size, - item_dma_event_t event) __OVERLOAD; - -item_dma_event_t WorkItemDmaCreateStrideTransaction( - const private T *src, - global T *dst, - size_t src_width, - size_t dst_width, - size_t src_stride, - size_t dst_stride, - size_t size, - item_dma_event_t event) __OVERLOAD; - -item_dma_event_t WorkItemDmaCreate3DTransaction( - const global T *src, - private T *dst, - size_t src_width, - size_t dst_width, - size_t src_stride, - size_t dst_stride, - size_t num_planes, - size_t src_plane_stride, - size_t dst_plane_stride, - size_t size, - item_dma_event_t event) __OVERLOAD; - -item_dma_event_t WorkItemDmaCreate3DTransaction( - const private T *src, - global T *dst, - size_t src_width, - size_t dst_width, - size_t src_stride, - size_t dst_stride, - size_t num_planes, - size_t src_plane_stride, - size_t dst_plane_stride, - size_t size, - item_dma_event_t event) __OVERLOAD; -``` -where `T` can be `uchar`, `char`, `short`, `ushort`, `int`, `uint`, `long`, `ulong`, `half` or `float`. diff --git a/docs/OV_Runtime_UG/InferenceEngine_QueryAPI.md b/docs/OV_Runtime_UG/InferenceEngine_QueryAPI.md deleted file mode 100644 index eacdffe186d..00000000000 --- a/docs/OV_Runtime_UG/InferenceEngine_QueryAPI.md +++ /dev/null @@ -1,235 +0,0 @@ -# Introduction to Inference Engine Device Query API {#openvino_docs_IE_DG_InferenceEngine_QueryAPI} - -## Inference Engine Query API (C++) - -@sphinxdirective -.. raw:: html - -
C++
-@endsphinxdirective - -The OpenVINO™ toolkit supports inferencing with several types of devices (processors or accelerators). -This section provides a high-level description of the process of querying of different device properties and configuration values at runtime. Refer to the [Hello Query Device С++ Sample](../../samples/cpp/hello_query_device/README.md) sources and the [Multi-Device Plugin documentation](supported_plugins/MULTI.md) for examples of using the Inference Engine Query API in user applications. - -### Using the Inference Engine Query API in Your Code - -The `InferenceEngine::Core` class provides the following API to query device information, set or get different device configuration properties: - -* `InferenceEngine::Core::GetAvailableDevices` - Provides a list of available devices. If there are more than one instance of a specific device, the devices are enumerated with `.suffix` where `suffix` is a unique string identifier. The device name can be passed to all methods of the `InferenceEngine::Core` class that work with devices, for example `InferenceEngine::Core::LoadNetwork`. -* `InferenceEngine::Core::GetMetric` - Provides information about specific device. - `InferenceEngine::Core::GetConfig` - Gets the current value of a specific configuration key. -* `InferenceEngine::Core::SetConfig` - Sets a new value for the configuration key. 
- -The `InferenceEngine::ExecutableNetwork` class is also extended to support the Query API: - -* `InferenceEngine::ExecutableNetwork::GetMetric` -* `InferenceEngine::ExecutableNetwork::GetConfig` -* `InferenceEngine::ExecutableNetwork::SetConfig` - -### Query API in the Core Class - -#### GetAvailableDevices - -@snippet snippets/InferenceEngine_QueryAPI0.cpp part0 - -The function returns a list of available devices, for example: - -``` -MYRIAD.1.2-ma2480 -MYRIAD.1.4-ma2480 -CPU -GPU.0 -GPU.1 -``` - -Each device name can then be passed to: - -* `InferenceEngine::Core::LoadNetwork` to load the network to a specific device. -* `InferenceEngine::Core::GetMetric` to get common or device specific metrics. -* All other methods of the `InferenceEngine::Core` class that accept `deviceName`. - -#### GetConfig() - -The code below demonstrates how to understand whether the `HETERO` device dumps GraphViz `.dot` files with split graphs during the split stage: - -@snippet snippets/InferenceEngine_QueryAPI1.cpp part1 - -For documentation about common configuration keys, refer to `ie_plugin_config.hpp`. Device specific configuration keys can be found in corresponding plugin folders. - -#### GetMetric() - -* To extract device properties such as available device, device name, supported configuration keys, and others, use the `InferenceEngine::Core::GetMetric` method: - -@snippet snippets/InferenceEngine_QueryAPI2.cpp part2 - -A returned value appears as follows: `Intel(R) Core(TM) i7-8700 CPU @ 3.20GHz`. - -> **NOTE**: All metrics have a type, which is specified during metric instantiation. The list of common device-agnostic metrics can be found in `ie_plugin_config.hpp`. Device specific metrics (for example, for HDDL or MYRIAD devices) can be found in corresponding plugin folders. 
- -### Query API in the ExecutableNetwork Class - -#### GetMetric() - -The method is used to get an executable network specific metric such as `METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)`: - -@snippet snippets/InferenceEngine_QueryAPI3.cpp part3 - -Or the current temperature of the `MYRIAD` device: - -@snippet snippets/InferenceEngine_QueryAPI4.cpp part4 - -#### GetConfig() - -The method is used to get information about configuration values the executable network has been created with: - -@snippet snippets/InferenceEngine_QueryAPI5.cpp part5 - -#### SetConfig() - -The only device that supports this method is [Multi-Device](supported_plugins/MULTI.md). - -## Inference Engine Query API (Python) - -@sphinxdirective -.. raw:: html - -
Python
-@endsphinxdirective - -This section provides a high-level description of the process of querying of different device properties and configuration values. Refer to the [Hello Query Device Python Sample](../../samples/python/hello_query_device/README.md) sources and the [Multi-Device Plugin documentation](supported_plugins/MULTI.md) for examples of using the Inference Engine Query API in user applications. - -### Using the Inference Engine Query API in Your Code - -The Inference Engine [Core](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino-inference-engine-iecore) class provides the following API to query device information, set or get different device configuration properties: - -* [ie_api.IECore.available_devices](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.available_devices) - Provides a list of available devices. If there are more than one instance of a specific device, the devices are enumerated with .suffix where suffix is a unique string identifier. The device name can be passed to all methods of the IECore class that work with devices, for example [ie_api.IECore.load_network](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.load_network). -* [ie_api.ieCore.get_metric](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.get_metric) - Provides information about specific device. -* [ie_api.IECore.get_config](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.get_config) - Gets the current value of a specific configuration key. -* [ie_api.IECore.set_config](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.set_config) - Sets a new value for the configuration key. 
- -The [ie_api.ExecutableNetwork](api/ie_python_api/_autosummary/openvino.inference_engine.ExecutableNetwork.html) class is also extended to support the Query API: -* [ie_api.ExecutableNetwork.get_metric](api/ie_python_api/_autosummary/openvino.inference_engine.ExecutableNetwork.html#openvino.inference_engine.ExecutableNetwork.get_metric) -* [ie_api.ExecutableNetwork.get_config](latest/api/ie_python_api/_autosummary/openvino.inference_engine.ExecutableNetwork.html#openvino.inference_engine.ExecutableNetwork.get_config) -* There is no method to call for set_config, but the equivalent action is described below. - -### Query API in the IECore Class - -#### Get Available Devices - -```python -from openvino.inference_engine import IECore - -ie = IECore() -print(ie.available_devices) -``` - -This code prints a list of available devices, for example: - -``` -MYRIAD.1.2-ma2480 -MYRIAD.1.4-ma2480 -FPGA.0 -FPGA.1 -CPU -GPU.0 -GPU.1 -``` - -Each device name can then be passed to: - -* `IECore.load_network` to load the network to a specific device. -* `IECore.get_metric` to get common or device specific metrics. -* All other methods of the `IECore` class that accept a device name. - -#### Get Metric - -To extract device properties such as available device, device name, supported configuration keys, and others, use the [IECore.get_metric](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.get_metric) method: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -ie.get_metric(device_name="CPU", metric_name="FULL_DEVICE_NAME") -``` - -A returned value appears as follows: `Intel(R) Core(TM) i7-8700 CPU @ 3.20GHz`. 
- -To list all supported metrics for a device: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -ie.get_metric(device_name="GPU", metric_name="SUPPORTED_METRICS") -``` - -#### Get Configuration - -The code below uses the [IECore.get_config](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.get_config) method and demonstrates how to understand whether the HETERO device dumps .dot files with split graphs during the split stage: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -ie.get_config(device_name="HETERO", config_name="HETERO_DUMP_GRAPH_DOT") -``` - -To list all supported configuration keys for a device: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -ie.get_metric(device_name=device, metric_name="SUPPORTED_CONFIG_KEYS") -``` - -For documentation about common configuration keys, refer to `ie_plugin_config.hpp`. Device specific configuration keys can be found in corresponding plugin folders. - - -### Query API in the ExecutableNetwork Class - -#### Get Metric - -To get the name of the loaded network: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -net = ie.read_network(model=path_to_xml_file) -exec_net = ie.load_network(network=net, device_name=device) -exec_net.get_metric("NETWORK_NAME") -``` - -Use `exec_net.get_metric("SUPPORTED_METRICS")` to list all supported metrics for an ExecutableNetwork instance. 
- - -#### Get Configuration - -The [IECore.get_config](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.get_config) method is used to get information about configuration values the executable network has been created with: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -net = ie.read_network(model=path_to_xml_file) -exec_net = ie.load_network(network=net, device_name="CPU") -exec_net.get_config("CPU_THREADS_NUM") -``` - -Or the current temperature of MYRIAD device: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -net = ie.read_network(model=path_to_xml_file) -exec_net = ie.load_network(network=net, device_name="MYRIAD") -exec_net.get_config("DEVICE_THERMAL") -``` - -Use `exec_net.get_metric("SUPPORTED_CONFIG_KEYS")` to list all supported configuration keys. - -#### Set Configuration - -The only device that supports this method in the ExecutableNetwork class is the [Multi-Device](supported_plugins/MULTI.md), where you can change the priorities of the devices for the Multi plugin in real time: `exec_net.set_config({{"MULTI_DEVICE_PRIORITIES", "GPU,CPU"}})`. See the Multi-Device documentation for more details. \ No newline at end of file diff --git a/docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md b/docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md deleted file mode 100644 index ebbec0e3c6e..00000000000 --- a/docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md +++ /dev/null @@ -1,490 +0,0 @@ -# Integrate Inference Engine {#openvino_docs_IE_DG_Integrate_with_customer_application_new_API} - -## Integrate Inference Engine with Your C++ Application - -@sphinxdirective -.. raw:: html - -
C++
-@endsphinxdirective - -The following diagram illustrates the typical Inference Engine С++ API workflow: - -![ie_api_flow_cpp] - -Read the sections below to learn about each item. - -> **NOTE**: Before start using Inference Engine, make sure you set all environment variables during the installation. If you did not, follow the instructions from the _Set the Environment Variables_ section in the installation guides: -> * [For Windows* 10](../install_guides/installing-openvino-windows.md) -> * [For Linux*](../install_guides/installing-openvino-linux.md) -> * [For macOS*](../install_guides/installing-openvino-macos.md) -> * To build an open source version, use the [Inference Engine Build Instructions](https://github.com/openvinotoolkit/openvino/wiki/BuildingCode). - -### Link with Inference Library - -1. **Create a structure** for the project: - ``` sh - project/ - ├── CMakeLists.txt - CMake file to build - ├── ... - Additional folders like includes/ - └── src/ - source folder - └── main.cpp - build/ - build directory - ... - ``` - -2. **Include Inference Engine, nGraph and OpenCV libraries** in `project/CMakeLists.txt` -[OpenCV](https://docs.opencv.org/master/db/df5/tutorial_linux_gcc_cmake.html) integration is needed mostly for pre-processing input data and model representation in OpenVINO™ Runtime for more complex applications using [OpenVINO Model API](../OV_Runtime_UG/model_representation.md). - ``` cmake - cmake_minimum_required(VERSION 3.0.0) - project(project_name) - find_package(OpenVINO REQUIRED) - add_executable(${PROJECT_NAME} src/main.cpp) - target_link_libraries(${PROJECT_NAME} PRIVATE openvino::runtime) - ``` - -### Use Inference Engine API to Implement Inference Pipeline - -This section provides step-by-step instructions to implement a typical inference pipeline with the Inference Engine C++ API: - -![ie_api_use_cpp] -#### Step 1. 
Create Inference Engine Core - -Use the following code to create Inference Engine Core to manage available devices and read network objects: - -@snippet snippets/Integrate_with_customer_application_new_API.cpp part0 - -#### Step 2 (Optional). Configure Input and Output of the Model - -@sphinxdirective -.. raw:: html - -
-@endsphinxdirective - - -Optionally, configure input and output of the model using the steps below: - -1. Load a model to a Core object: - @sphinxdirective - - .. tab:: IR - - .. code-block:: c - - auto network = core.ReadNetwork("model.xml"); - - .. tab:: ONNX - - .. code-block:: c - - auto network = core.ReadNetwork("model.onnx"); - - .. tab:: nGraph - - .. code-block:: c - - std::shared_ptr createNetwork() { - // To construct a network, please follow - // https://docs.openvino.ai/latest/openvino_docs_nGraph_DG_build_function.html - } - auto network = CNNNetwork(createNetwork()); - - @endsphinxdirective - -2. Request input and output information using `InferenceEngine::CNNNetwork::getInputsInfo()`, and `InferenceEngine::CNNNetwork::getOutputsInfo()` methods: - ```cpp - /** Take information about all topology inputs **/ - InferenceEngine::InputsDataMap input_info = network.getInputsInfo(); - /** Iterate over all input info**/ - for (auto &item : input_info) { - auto input_data = item.second; - // Add your input configuration steps here - } - - /** Take information about all topology outputs **/ - InferenceEngine::OutputsDataMap output_info = network.getOutputsInfo(); - /** Iterate over all output info**/ - for (auto &item : output_info) { - auto output_data = item.second; - // Add your output configuration steps here - } - ``` - Configuring options: - 1. **Set precision** (number format): FP16, FP32, INT8, etc. Refer to the Supported Configurations section on the [Supported Devices](supported_plugins/Supported_Devices.md) page to choose the relevant configuration.
- For input (*iterate over all input info*): - ```cpp - input_data->setPrecision(InferenceEngine::Precision::U8); - ``` - For output (*iterate over all output info*): - ```cpp - output_data->setPrecision(InferenceEngine::Precision::FP32); - ``` - **By default**, the input and output precision is set to `Precision::FP32`. - - 2. **Set layout** (NCHW, ).
- For input (*iterate over all input info*): - ```cpp - input_data->setLayout(InferenceEngine::Layout::NCHW); - ``` - **By default**, the input layout is set to `Layout::NCHW`.
- For output (*iterate over all output info*): - ```cpp - output_data->setLayout(InferenceEngine::Layout::NC); - ``` - **By default**, the output layout depends on a number of its dimensions:
- |Number of dimensions | 5 | 4 | 3 | 2 | 1 | - |:--------------------|-------|------|-----|----|----| - |Layout | NCDHW | NCHW | CHW | NC | C | - 3. **Set resize algorithm for inputs** (Bilinear). You can allow input of any size. To do this, mark each input as resizable by setting a desired resize algorithm (e.g. `BILINEAR`) inside of the appropriate input info (*Iterate over all input info*): - ```cpp - input_data->getPreProcess().setResizeAlgorithm(InferenceEngine::RESIZE_BILINEAR); - ``` - **By default**, no resize algorithm is set for inputs. - - 4. **Set color format** (BGR, RGB, NV12). Basic color format conversions are supported as well. **By default**, the Inference Engine assumes that the input color format is BGR and color format conversions are disabled. Set `ColorFormat::RAW` input color format if the input does not need color conversions. The Inference Engine supports the following color format conversions: - * RGB->BGR - * RGBX->BGR - * BGRX->BGR - * NV12->BGR - where X is a channel that will be ignored during inference. To enable the conversions, set a desired color format (for example, RGB) for each input inside of the appropriate input info (*iterate over all input info*): - ```cpp - input_data->getPreProcess().setColorFormat(InferenceEngine::ColorFormat::RGB); - ``` - > **NOTE**: NV12 input color format pre-processing differs from other color conversions. In case of NV12, Inference Engine expects two separate image planes (Y and UV). You must use a specific `InferenceEngine::NV12Blob` object instead of default blob object and set this blob to the Inference Engine Infer Request using `InferenceEngine::InferRequest::SetBlob()`. Refer to [Hello NV12 Input Classification C++ Sample](../../samples/cpp/hello_nv12_input_classification/README.md) for more details. - - 5. **Run on multiple images** with setting batch. If you want to run inference for multiple images at once, you can use the built-in batch pre-processing functionality. 
- - **NOTE** : Batch pre-processing is not supported if input color format is set to `ColorFormat::NV12`. - -@sphinxdirective -.. raw:: html - -
-@endsphinxdirective - -#### Step 3. Load the Model to the Device - -Load the model to the device using `InferenceEngine::Core::LoadNetwork()`: - - -@sphinxdirective - -.. tab:: IR - - .. code-block:: c - - executable_network = core.LoadNetwork("model.xml", "CPU"); - -.. tab:: ONNX - - .. code-block:: c - - executable_network = core.LoadNetwork("model.onnx", "CPU"); - -.. tab:: nGraph - - .. code-block:: c - - std::shared_ptr createNetwork() { - // To construct a network, please follow - // https://docs.openvino.ai/latest/openvino_docs_nGraph_DG_build_function.html - } - auto network = CNNNetwork(createNetwork()); - executable_network = core.LoadNetwork(network, "CPU"); - -.. tab:: Model From Step 2 - - Follow this step only if you went through optional "Step 2 (Optional). Configure Input and Output of the Model", otherwise use another tab for your model type: IR (OpenVINO Intermediate Representation), ONNX or nGraph. - - .. code-block:: c - - executable_network = core.LoadNetwork(network, "CPU"); - -@endsphinxdirective - - -It creates an executable network from a network object. The executable network is associated with single hardware device. -It is possible to create as many networks as needed and to use them simultaneously (up to the limitation of the hardware resources). - -Third parameter is a configuration for plugin. It is map of pairs: (parameter name, parameter value). Choose device from -[Supported devices](supported_plugins/Supported_Devices.md) page for more details about supported configuration parameters. - -@snippet snippets/Integrate_with_customer_application_new_API.cpp part6 - -#### Step 4. Create an Inference Request - -Create an infer request using the following code: - -@snippet snippets/Integrate_with_customer_application_new_API.cpp part7 - -#### Step 5. 
Prepare Input - -You can use one of the following options to prepare input: - -* **Optimal way for a single network.** Get blobs allocated by an infer request using `InferenceEngine::InferRequest::GetBlob()` and feed an image and the input data to the blobs. In this case, input data must be aligned (resized manually) with a given blob size and have a correct color format. - - @snippet snippets/Integrate_with_customer_application_new_API.cpp part8 - -* **Optimal way for a cascade of networks (output of one network is input for another).** Get output blob from the first request using `InferenceEngine::InferRequest::GetBlob()` and set it as input for the second request using `InferenceEngine::InferRequest::SetBlob()`. - - @snippet snippets/Integrate_with_customer_application_new_API.cpp part9 - -* **Optimal way to handle ROI (a ROI object located inside of input of one network is input for another).** It is possible to re-use shared input by several networks. You do not need to allocate separate input blob for a network if it processes a ROI object located inside of already allocated input of a previous network. For instance, when first network detects objects on a video frame (stored as input blob) and second network accepts detected bounding boxes (ROI inside of the frame) as input. In this case, it is allowed to re-use pre-allocated input blob (used by first network) by second network and just crop ROI without allocation of new memory using `InferenceEngine::make_shared_blob()` with passing of `InferenceEngine::Blob::Ptr` and `InferenceEngine::ROI` as parameters. - - @snippet snippets/Integrate_with_customer_application_new_API.cpp part10 - - Make sure that shared input is kept valid during execution of each network. Otherwise, ROI blob may be corrupted if the original input blob (that ROI is cropped from) has already been rewritten. 
- -* Allocate input blobs of the appropriate types and sizes, feed an image and the input data to the blobs, and call `InferenceEngine::InferRequest::SetBlob()` to set these blobs for an infer request: - - @snippet snippets/Integrate_with_customer_application_new_API.cpp part11 - -A blob can be filled before and after `SetBlob()`. - -> **NOTE**: -> -> * The `SetBlob()` method compares precision and layout of an input blob with the ones defined in step 3 and -> throws an exception if they do not match. It also compares a size of the input blob with input -> size of the read network. But if input was configured as resizable, you can set an input blob of -> any size (for example, any ROI blob). Input resize will be invoked automatically using resize -> algorithm configured on step 3. Similarly to the resize, color format conversions allow the color -> format of an input blob to differ from the color format of the read network. Color format -> conversion will be invoked automatically using color format configured on step 3. -> -> * `GetBlob()` logic is the same for pre-processable and not pre-processable input. Even if it is -> called with input configured as resizable or as having specific color format, a blob allocated by -> an infer request is returned. Its size and color format are already consistent with the -> corresponding values of the read network. No pre-processing will happen for this blob. If you -> call `GetBlob()` after `SetBlob()`, you will get the blob you set in `SetBlob()`. - -#### Step 6. Start Inference - -Start inference in asynchronous or synchronous mode. Async API usage can improve overall frame-rate of the application, because rather than wait for inference to complete, the app can continue doing things on the host, while accelerator is busy. 
- -* For synchronous inference request: - ```cpp - infer_request.Infer(); - ``` - -* For asynchronous inference request: - ```cpp - infer_request.StartAsync(); - infer_request.Wait(InferenceEngine::InferRequest::WaitMode::RESULT_READY); - ``` - `StartAsync` returns immediately and starts inference without blocking main thread, `Infer` blocks main thread and returns when inference is completed. Call `Wait` for waiting result to become available for asynchronous request. - - There are three ways to use it: - * specify maximum duration in milliseconds to block for. The method is blocked until the specified timeout has elapsed, or the result becomes available, whichever comes first. - * `InferenceEngine::InferRequest::WaitMode::RESULT_READY` - waits until inference result becomes available - * `InferenceEngine::InferRequest::WaitMode::STATUS_ONLY` - immediately returns request status.It does not - block or interrupts current thread. - - -Both requests are thread-safe: can be called from different threads without fearing corruption and failures. - -Multiple requests for single `ExecutableNetwork` are executed sequentially one by one in FIFO order. - -While request is ongoing, all its methods except `InferenceEngine::InferRequest::Wait` would throw an -exception. - -#### Step 7. Process the Inference Results - -Go over the output blobs and process the inference results. Note that casting `Blob` to `TBlob` via `std::dynamic_pointer_cast` is not the recommended way. It's better to access data via the `buffer()` and `as()` methods as follows: - -@snippet snippets/Integrate_with_customer_application_new_API.cpp part14 - -### Build Your Application - -For details about building your application, refer to the CMake files for the sample applications. -All samples source code is located in the `/samples` directory, where `INSTALL_DIR` is the OpenVINO™ installation directory. 
- -To build your project using CMake with the default build tools currently available on your machine, execute the following commands: - -> **NOTE**: Make sure you set environment variables first by running `/setupvars.sh` (or `setupvars.bat` for Windows). Otherwise the `InferenceEngine_DIR` and `OpenCV_DIR` variables won't be configured properly to pass `find_package` calls. - -```sh -cd build/ -cmake ../project -cmake --build . -``` -It's allowed to specify additional build options (e.g. to build CMake project on Windows with a specific build tools). Please refer to the [CMake page](https://cmake.org/cmake/help/latest/manual/cmake.1.html#manual:cmake(1)) for details. - -### Run Your Application - -> **NOTE**: Before running, make sure you completed **Set the Environment Variables** section in [OpenVINO Installation](../../samples/cpp/hello_nv12_input_classification/README.md) document so that the application can find the libraries. - -To run compiled applications on Microsoft* Windows* OS, make sure that Microsoft* Visual C++ 2017 -Redistributable and Intel® C++ Compiler 2017 Redistributable packages are installed and -`/bin/intel64/Release/*.dll` files are placed to the -application folder or accessible via `%PATH%` environment variable. - -## Integrate Inference Engine with Your Python Application - -@sphinxdirective -.. raw:: html - -
Python
-@endsphinxdirective - -This document explains how to integrate and use the Inference Engine API with your Python application. - -The following diagram illustrates the typical Inference Engine Python API workflow: -![ie_api_flow_python] - -Read the sections below to learn about each item. - -### Import Inference Module - -To make use of the Inference Engine functionality, import IECore to your application: - -```py -from openvino.inference_engine import IECore -``` - -### Use Inference Engine API - -This section provides step-by-step instructions to implement a typical inference pipeline with the Inference Engine API: - -![ie_api_use_python] - -#### Step 1. Create Inference Engine Core - -Use the following code to create Inference Engine Core to manage available devices and read network objects: -```py -ie = IECore() -``` -#### Step 2 (Optional). Read model. Configure Input and Output of the Model - -@sphinxdirective -.. raw:: html - -
-@endsphinxdirective - -Optionally, configure input and output of the model using the steps below: - -1. Read model - @sphinxdirective - - .. tab:: IR - - .. code-block:: python - - net = ie.read_network(model="model.xml") - - .. tab:: ONNX - - .. code-block:: python - - net = ie.read_network(model="model.onnx") - - .. tab:: nGraph - - .. code-block:: python - - #Basic example of nGraph model creation - param = Parameter(Type.f32, Shape([1, 3, 22, 22])) - relu = ng.relu(param) - func = Function([relu], [param], 'test') - caps = Function.to_capsule(func) - net = IENetwork(caps) - - @endsphinxdirective - -2. Request input and output information using input_info, outputs - ```py - inputs = net.input_info - input_name = next(iter(net.input_info)) - - outputs = net.outputs - output_name = next(iter(net.outputs)) - ``` - Information for this input layer is stored in input_info. The next cell prints the input layout, precision and shape. - ```py - print("Inputs:") - for name, info in net.input_info.items(): - print("\tname: {}".format(name)) - print("\tshape: {}".format(info.tensor_desc.dims)) - print("\tlayout: {}".format(info.layout)) - print("\tprecision: {}\n".format(info.precision)) - ``` - This cell output tells us that the model expects inputs with a shape of [1,3,224,224], and that this is in NCHW layout. This means that the model expects input data with a batch size (N) of 1, 3 channels (C), and images of a height (H) and width (W) of 224. The input data is expected to be of FP32 (floating point) precision. - - Getting the output layout, precision and shape is similar to getting the input layout, precision and shape. 
- ```py - print("Outputs:") - for name, info in net.outputs.items(): - print("\tname: {}".format(name)) - print("\tshape: {}".format(info.shape)) - print("\tlayout: {}".format(info.layout)) - print("\tprecision: {}\n".format(info.precision)) - ``` - This cell output shows that the model returns outputs with a shape of [1, 1001], where 1 is the batch size (N) and 1001 the number of classes (C). The output is returned as 32-bit floating point. - -@sphinxdirective -.. raw:: html - -
-@endsphinxdirective - -#### Step 3. Load model to the Device - -Load the model to the device using `load_network()`: - -@sphinxdirective - -.. tab:: IR - - .. code-block:: python - - exec_net = ie.load_network(network= "model.xml", device_name="CPU") -.. tab:: ONNX - - .. code-block:: python - - exec_net = ie.load_network(network= "model.onnx", device_name="CPU") - -.. tab:: Model from step 2 - - .. code-block:: python - - exec_net = ie.load_network(network=net, device_name="CPU") - -@endsphinxdirective - -This example is designed for CPU device, refer to the [Supported Devices](../OV_Runtime_UG/supported_plugins/Supported_Devices.md) page to read about more devices. - -#### Step 4. Prepare input -```py -import cv2 -import numpy as np - -image = cv2.imread("image.png") - -# Resize with OpenCV your image if needed to match with net input shape -# N, C, H, W = net.input_info[input_name].tensor_desc.dims -# image = cv2.resize(src=image, dsize=(W, H)) - -# Converting image to NCHW format with FP32 type -input_data = np.expand_dims(np.transpose(image, (2, 0, 1)), 0).astype(np.float32) -``` - -#### Step 5. Start Inference -```py -result = exec_net.infer({input_name: input_data}) -``` - -#### Step 6. Process the Inference Results -```py -output = result[output_name] -``` - -### Run Your Application - -Congratulations, you have made your first Python application with OpenVINO™ toolkit, now you may run it. 
- -[ie_api_flow_cpp]: img/BASIC_IE_API_workflow_Cpp.svg -[ie_api_use_cpp]: img/IMPLEMENT_PIPELINE_with_API_C.svg -[ie_api_flow_python]: img/BASIC_IE_API_workflow_Python.svg -[ie_api_use_python]: img/IMPLEMENT_PIPELINE_with_API_Python.svg diff --git a/docs/OV_Runtime_UG/Model_caching_overview.md b/docs/OV_Runtime_UG/Model_caching_overview.md index bf85c4ef680..70505abfc4a 100644 --- a/docs/OV_Runtime_UG/Model_caching_overview.md +++ b/docs/OV_Runtime_UG/Model_caching_overview.md @@ -1,59 +1,95 @@ # Model Caching Overview {#openvino_docs_IE_DG_Model_caching_overview} -## Introduction (C++) +## Introduction -@sphinxdirective -.. raw:: html +As described in the [Integrate OpenVINO™ with Your Application](integrate_with_your_application.md), a common application flow consists of the following steps: -
C++
-@endsphinxdirective +1. **Create a Core object**: First step to manage available devices and read model objects -As described in the [OpenVINO™ Runtime User Guide](OpenVINO_Runtime_User_Guide.md), a common application flow consists of the following steps: - -1. **Create an Inference Engine Core object**: First step to manage available devices and read network objects - -2. **Read the Intermediate Representation**: Read an Intermediate Representation file into an object of the `InferenceEngine::CNNNetwork` +2. **Read the Intermediate Representation**: Read an Intermediate Representation file into an object of the `ov::Model` 3. **Prepare inputs and outputs**: If needed, manipulate precision, memory layout, size or color format 4. **Set configuration**: Pass device-specific loading configurations to the device -5. **Compile and Load Network to device**: Use the `InferenceEngine::Core::LoadNetwork()` method with a specific device +5. **Compile and Load Network to device**: Use the `ov::Core::compile_model()` method with a specific device -6. **Set input data**: Specify input blob +6. **Set input data**: Specify input tensor 7. **Execute**: Carry out inference and process results Step 5 can potentially perform several time-consuming device-specific optimizations and network compilations, and such delays can lead to a bad user experience on application startup. To avoid this, some devices offer import/export network capability, and it is possible to either use the [Compile tool](../../tools/compile_tool/README.md) -or enable model caching to export compiled network automatically. Reusing cached networks can significantly reduce load network time. +or enable model caching to export compiled model automatically. Reusing cached model can significantly reduce compile model time. 
-### Set "CACHE_DIR" config option to enable model caching +### Set "cache_dir" config option to enable model caching To enable model caching, the application must specify a folder to store cached blobs, which is done like this: -@snippet snippets/InferenceEngine_Caching0.cpp part0 +@sphinxdirective -With this code, if the device specified by `LoadNetwork` supports import/export network capability, a cached blob is automatically created inside the `myCacheFolder` folder. -CACHE_DIR config is set to the Core object. If the device does not support import/export capability, cache is not created and no error is thrown. +.. tab:: C++ -Depending on your device, total time for loading network on application startup can be significantly reduced. -Also note that the very first LoadNetwork (when cache is not yet created) takes slightly longer time to "export" the compiled blob into a cache file: + .. doxygensnippet:: docs/snippets/ov_caching.cpp + :language: cpp + :fragment: [ov:caching:part0] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_caching.py + :language: python + :fragment: [ov:caching:part0] + +@endsphinxdirective + +With this code, if the device specified by `device_name` supports import/export model capability, a cached blob is automatically created inside the `/path/to/cache/dir` folder. +If the device does not support import/export capability, cache is not created and no error is thrown. + +Depending on your device, total time for compiling model on application startup can be significantly reduced. +Also note that the very first `compile_model` (when cache is not yet created) takes slightly longer time to "export" the compiled blob into a cache file: ![caching_enabled] -### Even faster: use LoadNetwork(modelPath) +### Even faster: use compile_model(modelPath) -In some cases, applications do not need to customize inputs and outputs every time. 
Such an application always -call `cnnNet = ie.ReadNetwork(...)`, then `ie.LoadNetwork(cnnNet, ..)` and it can be further optimized. -For these cases, the 2021.4 release introduces a more convenient API to load the network in a single call, skipping the export step: +In some cases, applications do not need to customize inputs and outputs every time. Such applications always +call `model = core.read_model(...)`, then `core.compile_model(model, ..)` and it can be further optimized. +For these cases, there is a more convenient API to compile the model in a single call, skipping the read step: -@snippet snippets/InferenceEngine_Caching1.cpp part1 +@sphinxdirective -With model caching enabled, total load time is even smaller, if ReadNetwork is optimized as well. +.. tab:: C++ -@snippet snippets/InferenceEngine_Caching2.cpp part2 + .. doxygensnippet:: docs/snippets/ov_caching.cpp + :language: cpp + :fragment: [ov:caching:part1] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_caching.py + :language: python + :fragment: [ov:caching:part1] + +@endsphinxdirective + +With model caching enabled, total load time is even smaller, if `read_model` is optimized as well. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_caching.cpp + :language: cpp + :fragment: [ov:caching:part2] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_caching.py + :language: python + :fragment: [ov:caching:part2] + +@endsphinxdirective ![caching_times] @@ -62,74 +98,23 @@ With model caching enabled, total load time is even smaller, if ReadNetwork is o Not every device supports network import/export capability. For those that don't, enabling caching has no effect. To check in advance if a particular device supports model caching, your application can use the following code: -@snippet snippets/InferenceEngine_Caching3.cpp part3 - -## Introduction (Python) - @sphinxdirective -.. raw:: html -
Python
+.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_caching.cpp + :language: cpp + :fragment: [ov:caching:part3] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_caching.py + :language: python + :fragment: [ov:caching:part3] + @endsphinxdirective -As described in Inference Engine Developer Guide, a common application flow consists of the following steps: - -1. **Create an Inference Engine Core Object** -2. **Read the Intermediate Representation** - Read an Intermediate Representation file into an object of the [ie_api.IENetwork](api/ie_python_api/_autosummary/openvino.inference_engine.IENetwork.html) -3. **Prepare inputs and outputs** -4. **Set configuration** - Pass device-specific loading configurations to the device -5. **Compile and Load Network to device** - Use the `IECore.load_network()` method and specify the target device -6. **Set input data** -7. **Execute the model** - Run inference - -Step #5 can potentially perform several time-consuming device-specific optimizations and network compilations, and such delays can lead to bad user experience on application startup. To avoid this, some devices offer Import/Export network capability, and it is possible to either use the [Compile tool](../../tools/compile_tool/README.md) or enable model caching to export the compiled network automatically. Reusing cached networks can significantly reduce load network time. - -### Set the “CACHE_DIR” config option to enable model caching - -To enable model caching, the application must specify the folder where to store cached blobs. It can be done using [IECore.set_config](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.set_config). 
- -``` python -from openvino.inference_engine import IECore - -ie = IECore() -ie.set_config(config={"CACHE_DIR": path_to_cache}, device_name=device) -net = ie.read_network(model=path_to_xml_file) -exec_net = ie.load_network(network=net, device_name=device) -``` - -With this code, if a device supports the Import/Export network capability, a cached blob is automatically created inside the path_to_cache directory `CACHE_DIR` config is set to the Core object. If device does not support Import/Export capability, cache is just not created and no error is thrown - -Depending on your device, total time for loading network on application startup can be significantly reduced. Please also note that very first [IECore.load_network](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.load_network) (when the cache is not yet created) takes slightly longer time to ‘export’ the compiled blob into a cache file. - -![caching_enabled] - - -### Even Faster: Use IECore.load_network(path_to_xml_file) - -In some cases, applications do not need to customize inputs and outputs every time. These applications always call [IECore.read_network](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.read_network), then `IECore.load_network(model=path_to_xml_file)` and may be further optimized. For such cases, it's more convenient to load the network in a single call to `ie.load_network()` -A model can be loaded directly to the device, with model caching enabled: - -``` python -from openvino.inference_engine import IECore - -ie = IECore() -ie.set_config(config={"CACHE_DIR" : path_to_cache}, device_name=device) -ie.load_network(network=path_to_xml_file, device_name=device) -``` - -![caching_times] - -### Advanced Examples - -Not every device supports network import/export capability, enabling of caching for such devices does not have any effect. 
To check in advance if a particular device supports model caching, your application can use the following code: - -```python -all_metrics = ie.get_metric(device_name=device, metric_name="SUPPORTED_METRICS") -# Find the 'IMPORT_EXPORT_SUPPORT' metric in supported metrics -allows_caching = "IMPORT_EXPORT_SUPPORT" in all_metrics -``` - -> **NOTE**: The GPU plugin does not have the IMPORT_EXPORT_SUPPORT capability, and does not support model caching yet. However, the GPU plugin supports caching kernels (see the [GPU plugin documentation](supported_plugins/GPU.md)). Kernel caching for the GPU plugin can be accessed the same way as model caching: by setting the `CACHE_DIR` configuration key to a folder where the cache should be stored. +> **NOTE**: The GPU plugin does not have the EXPORT_IMPORT capability, and does not support model caching yet. However, the GPU plugin supports caching kernels (see the [GPU plugin documentation](supported_plugins/GPU.md)). Kernel caching for the GPU plugin can be accessed the same way as model caching: by setting the `CACHE_DIR` configuration key to a folder where the cache should be stored. [caching_enabled]: ../img/caching_enabled.png diff --git a/docs/OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md b/docs/OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md deleted file mode 100644 index 01617f8b934..00000000000 --- a/docs/OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md +++ /dev/null @@ -1,51 +0,0 @@ -# OpenVINO™ Runtime User Guide {#openvino_docs_OV_Runtime_User_Guide} - -@sphinxdirective - -.. _deep learning inference engine: - -.. 
toctree:: - :maxdepth: 1 - :hidden: - - openvino_2_0_transition_guide - openvino_docs_IE_DG_Integrate_with_customer_application_new_API - openvino_docs_OV_Runtime_UG_Model_Representation - ngraph_transformation - openvino_docs_deployment_optimization_guide_dldt_optimization_guide - openvino_docs_IE_DG_Device_Plugins - openvino_docs_IE_DG_Int8Inference - openvino_docs_IE_DG_Bfloat16Inference - openvino_docs_IE_DG_DynamicBatching - openvino_docs_IE_DG_ShapeInference - openvino_docs_IE_DG_Model_caching_overview - openvino_docs_IE_DG_Extensibility_DG_Intro - openvino_docs_IE_DG_network_state_intro - openvino_docs_OV_Runtime_API_Changes - -@endsphinxdirective - -## Introduction -Inference Engine is a set of C++ libraries with C and Python bindings providing a common API to deliver inference solutions on the platform of your choice. Use the Inference Engine API to read the Intermediate Representation (IR), ONNX and execute the model on devices. - -Inference Engine uses a plugin architecture. Inference Engine plugin is a software component that contains complete implementation for inference on a certain Intel® hardware device: CPU, GPU, VPU, etc. Each plugin implements the unified API and provides additional hardware-specific APIs. - -The scheme below illustrates the typical workflow for deploying a trained deep learning model: - -![](img/BASIC_FLOW_IE_C.svg) - - -## Video - -@sphinxdirective - -.. list-table:: - - * - .. raw:: html - - - * - **Inference Engine Concept**. Duration: 3:43 - -@endsphinxdirective diff --git a/docs/OV_Runtime_UG/Operations_specifications.md b/docs/OV_Runtime_UG/Operations_specifications.md index 937d5def19b..e7366483166 100644 --- a/docs/OV_Runtime_UG/Operations_specifications.md +++ b/docs/OV_Runtime_UG/Operations_specifications.md @@ -4,7 +4,7 @@ .. 
toctree:: :maxdepth: 1 - + openvino_docs_ops_arithmetic_Abs_1 openvino_docs_ops_arithmetic_Acos_1 openvino_docs_ops_arithmetic_Acosh_3 @@ -85,6 +85,7 @@ openvino_docs_ops_image_I420toBGR_8 openvino_docs_ops_image_I420toRGB_8 openvino_docs_ops_signals_IDFT_7 + openvino_docs_ops_signals_IRDFT_9 openvino_docs_ops_infrastructure_If_8 openvino_docs_ops_image_Interpolate_1 openvino_docs_ops_image_Interpolate_4 @@ -136,6 +137,7 @@ openvino_docs_ops_generation_RandomUniform_8 openvino_docs_ops_generation_Range_1 openvino_docs_ops_generation_Range_4 + openvino_docs_ops_signals_RDFT_9 openvino_docs_ops_infrastructure_ReadValue_3 openvino_docs_ops_activation_ReLU_1 openvino_docs_ops_reduction_ReduceL1_4 @@ -175,6 +177,7 @@ openvino_docs_ops_activation_SoftMax_1 openvino_docs_ops_activation_SoftMax_8 openvino_docs_ops_activation_SoftPlus_4 + openvino_docs_ops_activation_SoftSign_9 openvino_docs_ops_movement_SpaceToBatch_2 openvino_docs_ops_movement_SpaceToDepth_1 openvino_docs_ops_movement_Split_1 diff --git a/docs/OV_Runtime_UG/PythonPackage_Overview.md b/docs/OV_Runtime_UG/PythonPackage_Overview.md new file mode 100644 index 00000000000..5e03eb3295c --- /dev/null +++ b/docs/OV_Runtime_UG/PythonPackage_Overview.md @@ -0,0 +1,14 @@ +# OpenVINO™ Python* Package + +OpenVINO™ Python\* package includes types to measure model and calibrate to low precision. + +The OpenVINO™ Python\* package available in the `/python/python3.X` directory. + +The OpenVINO™ Python\* package includes the following sub-packages: + + - [openvino.inference_engine](../../src/bindings/python/docs/api_overview.md) - Python\* wrapper on OpenVINO™ Inference Engine. + - `openvino.tools.accuracy_checker` - Measure accuracy. + - `openvino.tools.benchmark` - Measure latency and throughput. 
+ +## See Also +* [Integrate with Customer Application New API](integrate_with_your_application.md) diff --git a/docs/OV_Runtime_UG/Python_API_exclusives.md b/docs/OV_Runtime_UG/Python_API_exclusives.md new file mode 100644 index 00000000000..3d3375acb34 --- /dev/null +++ b/docs/OV_Runtime_UG/Python_API_exclusives.md @@ -0,0 +1,143 @@ +# OpenVINO™ Python API exclusives {#openvino_docs_OV_Runtime_UG_Python_API_exclusives} + +OpenVINO™ Runtime Python API is exposing additional features and helpers to elevate user experience. Main goal of Python API is to provide user-friendly and simple, still powerful, tool for Python users. + +## Easier model compilation + +`CompiledModel` can be easily created with the helper method. It hides `Core` creation and applies `AUTO` device by default. + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [auto_compilation] + +@endsphinxdirective + +## Model/CompiledModel inputs and outputs + +Besides functions aligned to C++ API, some of them have their Pythonic counterparts or extensions. For example, `Model` and `CompiledModel` inputs/outputs can be accessed via properties. + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [properties_example] + +@endsphinxdirective + +Refer to Python API documentation on which helper functions or properties are available for different classes. + +## Working with Tensor + +Python API allows passing data as tensors. `Tensor` object holds a copy of the data from the given array. `dtype` of numpy arrays is converted to OpenVINO™ types automatically. + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [tensor_basics] + +@endsphinxdirective + +### Shared memory mode + +`Tensor` objects can share the memory with numpy arrays. 
By specifying `shared_memory` argument, a `Tensor` object does not perform copy of data and has access to the memory of the numpy array. + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [tensor_shared_mode] + +@endsphinxdirective + +### Slices of array's memory + +One of the `Tensor` class constructors allows sharing the slice of array's memory. When `shape` is specified in the constructor that has the numpy array as first argument, it triggers the special shared memory mode. + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [tensor_slice_mode] + +@endsphinxdirective + +## Running inference + +Python API supports extra calling methods to synchronous and asynchronous modes for inference. + +All infer methods allow users to pass data as popular numpy arrays, gathered in either Python dicts or lists. + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [passing_numpy_array] + +@endsphinxdirective + +Results from inference can be obtained in various ways: + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [getting_results] + +@endsphinxdirective + +### Synchronous mode - extended + +Python API provides different synchronous calls to infer model, which block the application execution. Additionally these calls return results of inference: + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [sync_infer] + +@endsphinxdirective + +### AsyncInferQueue + +Asynchronous mode pipelines can be supported with wrapper class called `AsyncInferQueue`. This class automatically spawns pool of `InferRequest` objects (also called "jobs") and provides synchronization mechanisms to control flow of the pipeline. 
+ +Each job is distinguishable by unique `id`, which is in the range from 0 up to number of jobs specified in `AsyncInferQueue` constructor. + +Function call `start_async` is not required to be synchronized, it waits for any available job if queue is busy/overloaded. Every `AsyncInferQueue` code block should end with `wait_all` function. It provides "global" synchronization of all jobs in the pool and ensures that access to them is safe. + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [asyncinferqueue] + +@endsphinxdirective + +#### Acquire results from requests + +After the call to `wait_all`, jobs and their data can be safely accessed. Acquiring of a specific job with `[id]` returns `InferRequest` object, which results in seamless retrieval of the output data. + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [asyncinferqueue_access] + +@endsphinxdirective + +#### Setting callbacks + +Another feature of `AsyncInferQueue` is ability of setting callbacks. When callback is set, any job that ends inference, calls upon Python function. Callback function must have two arguments. First is the request that calls the callback, it provides `InferRequest` API. Second one being called "userdata", provides possibility of passing runtime values, which can be of any Python type and later used inside callback function. + +The callback of `AsyncInferQueue` is uniform for every job. When executed, GIL is acquired to ensure safety of data manipulation inside the function. + +@sphinxdirective + +.. 
doxygensnippet:: docs/snippets/ov_python_exclusives.py + :language: python + :fragment: [asyncinferqueue_set_callback] + +@endsphinxdirective diff --git a/docs/OV_Runtime_UG/Samples_Overview.md b/docs/OV_Runtime_UG/Samples_Overview.md index 2e43e342a26..ddb7a476aca 100644 --- a/docs/OV_Runtime_UG/Samples_Overview.md +++ b/docs/OV_Runtime_UG/Samples_Overview.md @@ -1,4 +1,4 @@ -# Inference Engine Samples {#openvino_docs_IE_DG_Samples_Overview} +# OpenVINO Samples {#openvino_docs_IE_DG_Samples_Overview} @sphinxdirective @@ -19,8 +19,8 @@ openvino_inference_engine_ie_bridges_c_samples_hello_nv12_input_classification_README openvino_inference_engine_samples_hello_query_device_README openvino_inference_engine_ie_bridges_python_sample_hello_query_device_README - openvino_inference_engine_samples_ngraph_function_creation_sample_README - openvino_inference_engine_ie_bridges_python_sample_ngraph_function_creation_sample_README + openvino_inference_engine_samples_model_creation_sample_README + openvino_inference_engine_ie_bridges_python_sample_model_creation_sample_README openvino_inference_engine_samples_speech_sample_README openvino_inference_engine_ie_bridges_python_sample_speech_sample_README openvino_inference_engine_samples_benchmark_app_README @@ -28,14 +28,14 @@ @endsphinxdirective -The Inference Engine sample applications are simple console applications that show how to utilize specific Inference Engine capabilities within an application, assist developers in executing specific tasks such as loading a model, running inference, querying specific device capabilities and etc. +The OpenVINO sample applications are simple console applications that show how to utilize specific OpenVINO API capabilities within an application, assist developers in executing specific tasks such as loading a model, running inference, querying specific device capabilities and etc. 
After installation of Intel® Distribution of OpenVINO™ toolkit, С, C++ and Python* sample applications are available in the following directories, respectively: * `/samples/c` * `/samples/cpp` * `/samples/python` -Inference Engine sample applications include the following: +OpenVINO sample applications include the following: - **Speech Sample** - Acoustic model inference based on Kaldi neural networks and speech feature vectors. - [Automatic Speech Recognition C++ Sample](../../samples/cpp/speech_sample/README.md) @@ -50,7 +50,7 @@ Inference Engine sample applications include the following: - **Hello NV12 Input Classification Sample** – Input of any size and layout can be provided to an infer request. The sample transforms the input to the NV12 color format and pre-process it automatically during inference. The sample supports only images as inputs. - [Hello NV12 Input Classification C++ Sample](../../samples/cpp/hello_nv12_input_classification/README.md) - [Hello NV12 Input Classification C Sample](../../samples/c/hello_nv12_input_classification/README.md) -- **Hello Query Device Sample** – Query of available Inference Engine devices and their metrics, configuration values. +- **Hello Query Device Sample** – Query of available OpenVINO devices and their metrics, configuration values. - [Hello Query Device C++ Sample](../../samples/cpp/hello_query_device/README.md) - [Hello Query Device Python* Sample](../../samples/python/hello_query_device/README.md) - **Hello Reshape SSD Sample** – Inference of SSD networks resized by ShapeInfer API according to an input size. @@ -59,10 +59,10 @@ Inference Engine sample applications include the following: - **Image Classification Sample Async** – Inference of image classification networks like AlexNet and GoogLeNet using Asynchronous Inference Request API (the sample supports only images as inputs). 
- [Image Classification Async C++ Sample](../../samples/cpp/classification_sample_async/README.md) - [Image Classification Async Python* Sample](../../samples/python/classification_sample_async/README.md) -- **nGraph Function Creation Sample** – Construction of the LeNet network using the nGraph function creation sample. - - [nGraph Function Creation C++ Sample](../../samples/cpp/ngraph_function_creation_sample/README.md) - - [nGraph Function Creation Python Sample](../../samples/python/ngraph_function_creation_sample/README.md) - +- **OpenVINO Model Creation Sample** – Construction of the LeNet model using the OpenVINO model creation sample. + - [OpenVINO Model Creation C++ Sample](../../samples/cpp/model_creation_sample/README.md) + - [OpenVINO Model Creation Python Sample](../../samples/python/model_creation_sample/README.md) + > **NOTE**: All C++ samples support input paths containing only ASCII characters, except the Hello Classification Sample, that supports Unicode. ## Media Files Available for Samples @@ -79,8 +79,8 @@ To run the sample, you can use [public](@ref omz_models_group_public) or [Intel' The officially supported Linux* build environment is the following: -* Ubuntu* 18.04 LTS 64-bit or CentOS* 7 64-bit -* GCC* 7.5.0 (for Ubuntu* 18.04) or GCC* 4.8.5 (for CentOS* 7.6) +* Ubuntu* 18.04 LTS 64-bit or Ubuntu* 20.04 LTS 64-bit +* GCC* 7.5.0 (for Ubuntu* 18.04) or GCC* 9.3.0 (for Ubuntu* 20.04) * CMake* version 3.10 or higher > **NOTE**: For building samples from the open-source version of OpenVINO™ toolkit, see the [build instructions on GitHub](https://github.com/openvinotoolkit/openvino/wiki/BuildingCode). 
@@ -102,7 +102,7 @@ You can also build the sample applications manually: ```sh mkdir build ``` -> **NOTE**: If you ran the Image Classification verification script during the installation, the C++ samples build directory was already created in your home directory: `~/inference_engine_samples_build/` +> **NOTE**: If you ran the Image Classification verification script during the installation, the C++ samples build directory was already created in your home directory: `~/inference_engine_cpp_samples_build/` 2. Go to the created directory: ```sh @@ -130,22 +130,17 @@ for the debug configuration — in `/intel64/Debug/`. The recommended Windows* build environment is the following: * Microsoft Windows* 10 -* Microsoft Visual Studio* 2017, or 2019 +* Microsoft Visual Studio* 2019 * CMake* version 3.10 or higher -> **NOTE**: If you want to use Microsoft Visual Studio 2019, you are required to install CMake 3.14. +> **NOTE**: If you want to use Microsoft Visual Studio 2019, you are required to install CMake 3.14 or higher. To build the C or C++ sample applications on Windows, go to the `\samples\c` or `\samples\cpp` directory, respectively, and run the `build_samples_msvc.bat` batch file: ```sh build_samples_msvc.bat ``` -By default, the script automatically detects the highest Microsoft Visual Studio version installed on the machine and uses it to create and build -a solution for a sample code. Optionally, you can also specify the preferred Microsoft Visual Studio version to be used by the script. Supported -versions are `VS2017` and `VS2019`. 
For example, to build the C++ samples using the Microsoft Visual Studio 2017, use the following command: -```sh -\samples\cpp\build_samples_msvc.bat VS2017 -``` +By default, the script automatically detects the highest Microsoft Visual Studio version installed on the machine and uses it to create and build a solution for a sample code Once the build is completed, you can find sample binaries in the following folders: * C samples: `C:\Users\\Documents\Intel\OpenVINO\inference_engine_c_samples_build\intel64\Release` @@ -159,7 +154,7 @@ directory. The officially supported macOS* build environment is the following: -* macOS* 10.15 64-bit +* macOS* 10.15 64-bit or higher * Clang* compiler from Xcode* 10.1 or higher * CMake* version 3.13 or higher @@ -180,7 +175,7 @@ You can also build the sample applications manually: > **NOTE**: Before proceeding, make sure you have OpenVINO™ environment set correctly. This can be done manually by ```sh -cd /bin +cd / source setupvars.sh ``` @@ -188,7 +183,7 @@ source setupvars.sh ```sh mkdir build ``` -> **NOTE**: If you ran the Image Classification verification script during the installation, the C++ samples build directory was already created in your home directory: `~/inference_engine_samples_build/` +> **NOTE**: If you ran the Image Classification verification script during the installation, the C++ samples build directory was already created in your home directory: `~/inference_engine_cpp_samples_build/` 2. Go to the created directory: ```sh @@ -217,7 +212,7 @@ for the debug configuration — in `/intel64/Debug/`. ### Get Ready for Running the Sample Applications on Linux* Before running compiled binary files, make sure your application can find the -Inference Engine and OpenCV libraries. +OpenVINO Runtime libraries. Run the `setupvars` script to set all necessary environment variables: ```sh source /setupvars.sh @@ -246,7 +241,7 @@ list above. 
### Get Ready for Running the Sample Applications on Windows* Before running compiled binary files, make sure your application can find the -Inference Engine and OpenCV libraries. +OpenVINO Runtime libraries. Use the `setupvars` script, which sets all necessary environment variables: ```sh \setupvars.bat @@ -255,13 +250,13 @@ Use the `setupvars` script, which sets all necessary environment variables: To debug or run the samples on Windows in Microsoft Visual Studio, make sure you have properly configured **Debugging** environment settings for the **Debug** and **Release** configurations. Set correct paths to the OpenCV libraries, and -debug and release versions of the Inference Engine libraries. +debug and release versions of the OpenVINO Runtime libraries. For example, for the **Debug** configuration, go to the project's **Configuration Properties** to the **Debugging** category and set the `PATH` variable in the **Environment** field to the following: ```sh -PATH=\runtime\bin;\opencv\bin;%PATH% +PATH=\runtime\bin;%PATH% ``` where `` is the directory in which the OpenVINO toolkit is installed. @@ -270,4 +265,4 @@ sample, read the sample documentation by clicking the sample name in the samples list above. ## See Also -* [OpenVINO™ Runtime User Guide](OpenVINO_Runtime_User_Guide.md) +* [OpenVINO™ Runtime User Guide](openvino_intro.md) diff --git a/docs/OV_Runtime_UG/ShapeInference.md b/docs/OV_Runtime_UG/ShapeInference.md index 43a48ce5e89..4cb274ad827 100644 --- a/docs/OV_Runtime_UG/ShapeInference.md +++ b/docs/OV_Runtime_UG/ShapeInference.md @@ -1,4 +1,4 @@ -# Using the Reshape Inference Feature {#openvino_docs_IE_DG_ShapeInference} +# Changing input shapes {#openvino_docs_IE_DG_ShapeInference} ## Introduction (C++) @@ -112,7 +112,7 @@ To keep the model valid after the reshape, choose a new input shape that satisfi For details, refer to the Tensorflow Object Detection API models resizing techniques. 
### Extensibility -The Inference Engine provides a special mechanism that allows adding support of shape inference for custom operations. This mechanism is described in the [Extensibility documentation](Extensibility_DG/Intro.md) +The Inference Engine provides a special mechanism that allows adding support of shape inference for custom operations. This mechanism is described in the [Extensibility documentation](../Extensibility_UG/Intro.md) ## Introduction (Python) @@ -166,7 +166,7 @@ To feed input data of a shape that is different from the model input shape, resh Once the input shape of IENetwork is set, call the `IECore.load_network` method to get an ExecutableNetwork object for inference with updated shapes. -There are other approaches to reshape the model during the stage of IR generation or [nGraph function](https://docs.openvino.ai/latest/openvino_docs_nGraph_DG_PythonAPI.html#create_an_ngraph_function_from_a_graph) creation. +There are other approaches to reshape the model during the stage of IR generation or [OpenVINO model](https://docs.openvino.ai/latest/openvino_docs_nGraph_DG_PythonAPI.html#create_an_ngraph_function_from_a_graph) creation. Practically, some models are not ready to be reshaped. In this case, a new input shape cannot be set with the Model Optimizer or the `IENetwork.reshape` method. @@ -218,7 +218,7 @@ exec_net = ie.load_network(network=net, device_name="CPU") ``` ### Extensibility -The Inference Engine provides a special mechanism that allows adding support of shape inference for custom operations. This mechanism is described in the [Extensibility documentation](Extensibility_DG/Intro.md) +The Inference Engine provides a special mechanism that allows adding support of shape inference for custom operations. 
This mechanism is described in the [Extensibility documentation](../Extensibility_UG/Intro.md) ### See Also: diff --git a/docs/OV_Runtime_UG/auto_device_selection.md b/docs/OV_Runtime_UG/auto_device_selection.md new file mode 100644 index 00000000000..c3a667a65bc --- /dev/null +++ b/docs/OV_Runtime_UG/auto_device_selection.md @@ -0,0 +1,410 @@ +# Automatic device selection {#openvino_docs_IE_DG_supported_plugins_AUTO} + +@sphinxdirective + +.. toctree:: + :maxdepth: 1 + :hidden: + + Debugging Auto-Device Plugin + +@endsphinxdirective + +The Auto-Device plugin, or AUTO, is a virtual device which automatically selects the processing unit to use for inference with OpenVINO™. It chooses from a list of available devices defined by the user and aims at finding the most suitable hardware for the given model. The best device is chosen using the following logic: + +1. Check which supported devices are available. +2. Check the precision of the input model (for detailed information on precisions read more on the [OPTIMIZATION_CAPABILITIES metric](../IE_PLUGIN_DG/Plugin.md)) +3. From the priority list, select the first device capable of supporting the given precision. +4. If the network’s precision is FP32 but there is no device capable of supporting it, offload the network to a device supporting FP16. + +@sphinxdirective ++----------+-------------------------------------------------+-------------------------------------+ +| Choice | | Supported | | Supported | +| Priority | | Device | | model precision | ++==========+=================================================+=====================================+ +| 1 | | dGPU | FP32, FP16, INT8, BIN | +| | | (e.g. Intel® Iris® Xe MAX) | | ++----------+-------------------------------------------------+-------------------------------------+ +| 2 | | VPUX | INT8 | +| | | (e.g. 
Intel® Movidius® VPU 3700VE) | | ++----------+-------------------------------------------------+-------------------------------------+ +| 3 | | iGPU | FP32, FP16, BIN, | +| | | (e.g. Intel® UHD Graphics 620 (iGPU)) | | ++----------+-------------------------------------------------+-------------------------------------+ +| 4 | | Intel® Neural Compute Stick 2 (Intel® NCS2) | FP16 | +| | | | ++----------+-------------------------------------------------+-------------------------------------+ +| 5 | | Intel® CPU | FP32, FP16, INT8, BIN | +| | | (e.g. Intel® Core™ i7-1165G7) | | ++----------+-------------------------------------------------+-------------------------------------+ +@endsphinxdirective + +To put it simply, when loading the network to the first device on the list fails, AUTO will try to load it to the next device in line, until one of them succeeds. For example: +If you have dGPU in your system, it will be selected for most jobs (first on the priority list and supports multiple precisions). But if you want to run a WINOGRAD-enabled IR, your CPU will be selected (WINOGRAD optimization is not supported by dGPU). If you have Myriad and IA CPU in your system, Myriad will be selected for FP16 models, but IA CPU will be chosen for FP32 ones. + +What is important, **AUTO always starts inference with the CPU**. CPU provides very low latency and can start inference with no additional delays. While it performs inference, the Auto-Device plugin continues to load the model to the device best suited for the purpose and transfers the task to it when ready. This way, the devices which are much slower in loading the network, GPU being the best example, do not impede inference at its initial stages. + +This mechanism can be easily observed in our Benchmark Application sample ([see here](#Benchmark App Info)), showing how the first-inference latency (the time it takes to load the network and perform the first inference) is reduced when using AUTO. 
For example: + +@sphinxdirective +.. code-block:: sh + + ./benchmark_app -m ../public/alexnet/FP32/alexnet.xml -d GPU -niter 128 +@endsphinxdirective + +first-inference latency: **2594.29 ms + 9.21 ms** + +@sphinxdirective +.. code-block:: sh + + ./benchmark_app -m ../public/alexnet/FP32/alexnet.xml -d AUTO:CPU,GPU -niter 128 +@endsphinxdirective + +first-inference latency: **173.13 ms + 13.20 ms** + +@sphinxdirective +.. note:: + The realtime performance will be closer to the best suited device the longer the process runs. +@endsphinxdirective + +## Using the Auto-Device Plugin + +Inference with AUTO is configured similarly to other plugins: first you configure devices, then load a network to the plugin, and finally, execute inference. + +Following the OpenVINO™ naming convention, the Auto-Device plugin is assigned the label of “AUTO.” It may be defined with no additional parameters, resulting in defaults being used, or configured further with the following setup options: + +@sphinxdirective ++-------------------------+-----------------------------------------------+-----------------------------------------------------------+ +| Property | Property values | Description | ++=========================+===============================================+===========================================================+ +| | | AUTO: | | Lists the devices available for selection. | +| | | comma-separated, no spaces | | The device sequence will be taken as priority | +| | | | | from high to low. | +| | | | | If not specified, “AUTO” will be used as default | +| | | | | and all devices will be included. | ++-------------------------+-----------------------------------------------+-----------------------------------------------------------+ +| ov::device:priorities | | device names | | Specifies the devices for Auto-Device plugin to select. | +| | | comma-separated, no spaces | | The device sequence will be taken as priority | +| | | | | from high to low. 
| +| | | | | This configuration is optional. | ++-------------------------+-----------------------------------------------+-----------------------------------------------------------+ +| ov::hint | | THROUGHPUT | | Specifies the performance mode preferred | +| | | LATENCY | | by the application. | ++-------------------------+-----------------------------------------------+-----------------------------------------------------------+ +| ov::hint:model_priority | | MODEL_PRIORITY_HIGH | | Indicates the priority for a network. | +| | | MODEL_PRIORITY_MED | | Importantly! | +| | | MODEL_PRIORITY_LOW | | This property is still not fully supported | ++-------------------------+-----------------------------------------------+-----------------------------------------------------------+ +@endsphinxdirective + +@sphinxdirective +.. dropdown:: Click for information on Legacy APIs + + For legacy APIs like LoadNetwork/SetConfig/GetConfig/GetMetric: + + - replace {ov::device:priorities, "GPU,CPU"} with {"MULTI_DEVICE_PRIORITIES", "GPU,CPU"} + - replace {ov::hint:model_priority, "LOW"} with {"MODEL_PRIORITY", "LOW"} + - InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES is defined as same string "MULTI_DEVICE_PRIORITIES" + - CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU is equal to "GPU,CPU" + - InferenceEngine::PluginConfigParams::KEY_MODEL_PRIORITY is defined as same string "MODEL_PRIORITY" + - InferenceEngine::PluginConfigParams::MODEL_PRIORITY_LOW is defined as same string "LOW" +@endsphinxdirective + +### Device candidate list +The device candidate list allows users to customize the priority and limit the choice of devices available to the AUTO plugin. If not specified, the plugin assumes all the devices present in the system can be used. Note, that OpenVINO™ Runtime lets you use “GPU” as an alias for “GPU.0” in function calls. +The following commands are accepted by the API: + +@sphinxdirective +.. tab:: C++ API + + .. 
code-block:: cpp + + /*** With Inference Engine 2.0 API ***/ + ov::Core core; + + // Read a network in IR, PaddlePaddle, or ONNX format: + std::shared_ptr model = core.read_model("sample.xml"); + + // Load a network to AUTO using the default list of device candidates. + // The following lines are equivalent: + ov::CompiledModel model0 = core.compile_model(model); + ov::CompiledModel model1 = core.compile_model(model, "AUTO"); + ov::CompiledModel model2 = core.compile_model(model, "AUTO", {}); + + // You can also specify the devices to be used by AUTO in its selection process. + // The following lines are equivalent: + ov::CompiledModel model3 = core.compile_model(model, "AUTO:GPU,CPU"); + ov::CompiledModel model4 = core.compile_model(model, "AUTO", {{ov::device::priorities.name(), "GPU,CPU"}}); + + // the AUTO plugin is pre-configured (globally) with the explicit option: + core.set_property("AUTO", ov::device::priorities("GPU,CPU")); + +.. tab:: C++ legacy API + + .. code-block:: cpp + + /*** With API Prior to 2022.1 Release ***/ + InferenceEngine::Core ie; + + // Read a network in IR, PaddlePaddle, or ONNX format: + InferenceEngine::CNNNetwork network = ie.ReadNetwork("sample.xml"); + + // Load a network to AUTO using the default list of device candidates. + // The following lines are equivalent: + InferenceEngine::ExecutableNetwork exec0 = ie.LoadNetwork(network); + InferenceEngine::ExecutableNetwork exec1 = ie.LoadNetwork(network, "AUTO"); + InferenceEngine::ExecutableNetwork exec2 = ie.LoadNetwork(network, "AUTO", {}); + + // You can also specify the devices to be used by AUTO in its selection process. 
+ // The following lines are equivalent: + InferenceEngine::ExecutableNetwork exec3 = ie.LoadNetwork(network, "AUTO:GPU,CPU"); + InferenceEngine::ExecutableNetwork exec4 = ie.LoadNetwork(network, "AUTO", {{"MULTI_DEVICE_PRIORITIES", "GPU,CPU"}}); + + // the AUTO plugin is pre-configured (globally) with the explicit option: + ie.SetConfig({{"MULTI_DEVICE_PRIORITIES", "GPU,CPU"}}, "AUTO"); + +.. tab:: Python API + + .. code-block:: python + + ### New IE 2.0 API ### + + from openvino.runtime import Core + core = Core() + + # Read a network in IR, PaddlePaddle, or ONNX format: + model = core.read_model(model_path) + + # Load a network to AUTO using the default list of device candidates. + # The following lines are equivalent: + model = core.compile_model(model=model) + compiled_model = core.compile_model(model=model, device_name="AUTO") + compiled_model = core.compile_model(model=model, device_name="AUTO", config={}) + + # You can also specify the devices to be used by AUTO in its selection process. + # The following lines are equivalent: + compiled_model = core.compile_model(model=model, device_name="AUTO:CPU,GPU") + compiled_model = core.compile_model(model=model, device_name="AUTO", config={"MULTI_DEVICE_PRIORITIES": "CPU,GPU"}) + + # the AUTO plugin is pre-configured (globally) with the explicit option: + core.set_config(config={"MULTI_DEVICE_PRIORITIES":"CPU,GPU"}, device_name="AUTO") + +.. tab:: Python legacy API + + .. code-block:: python + + ### API before 2022.1 ### + from openvino.inference_engine import IECore + ie = IECore() + + # Read a network in IR, PaddlePaddle, or ONNX format: + net = ie.read_network(model=path_to_model) + + # Load a network to AUTO using the default list of device candidates. 
+ # The following lines are equivalent: + exec_net = ie.load_network(network=net) + exec_net = ie.load_network(network=net, device_name="AUTO") + exec_net = ie.load_network(network=net, device_name="AUTO", config={}) + + # You can also specify the devices to be used by AUTO in its selection process. + # The following lines are equivalent: + exec_net = ie.load_network(network=net, device_name="AUTO:CPU,GPU") + exec_net = ie.load_network(network=net, device_name="AUTO", config={"MULTI_DEVICE_PRIORITIES": "CPU,GPU"}) + + # the AUTO plugin is pre-configured (globally) with the explicit option: + ie.SetConfig(config={"MULTI_DEVICE_PRIORITIES", "CPU,GPU"}, device_name="AUTO"); + +@endsphinxdirective + +To check what devices are present in the system, you can use Device API: + +For C++ API +@sphinxdirective +.. code-block:: sh + + ov::runtime::Core::get_available_devices() (see Hello Query Device C++ Sample) +@endsphinxdirective + +For Python API +@sphinxdirective +.. code-block:: sh + + openvino.runtime.Core.available_devices (see Hello Query Device Python Sample) +@endsphinxdirective + + +### Performance Hints +The `ov::hint` property enables you to specify a performance mode for the plugin to be more efficient for particular use cases. + +#### ov::hint::PerformanceMode::THROUGHPUT +This mode prioritizes high throughput, balancing between latency and power. It is best suited for tasks involving multiple jobs, like inference of video feeds or large numbers of images. + +#### ov::hint::PerformanceMode::LATENCY +This mode prioritizes low latency, providing short response time for each inference job. It performs best for tasks where inference is required for a single input image, like a medical analysis of an ultrasound scan image. It also fits the tasks of real-time or nearly real-time applications, such as an industrial robot's response to actions in its environment or obstacle avoidance for autonomous vehicles. 
+Note that currently the `ov::hint` property is supported by CPU and GPU devices only. + +To enable Performance Hints for your application, use the following code: +@sphinxdirective +.. tab:: C++ API + + .. code-block:: cpp + + ov::Core core; + + // Read a network in IR, PaddlePaddle, or ONNX format: + std::shared_ptr model = core.read_model("sample.xml"); + + // Load a network to AUTO with Performance Hints enabled: + // To use the “throughput” mode: + ov::CompiledModel compiled_model = core.compile_model(model, "AUTO:GPU,CPU", {{ov::hint::performance_mode.name(), "THROUGHPUT"}}); + + // or the “latency” mode: + ov::CompiledModel compiledModel1 = core.compile_model(model, "AUTO:GPU,CPU", {{ov::hint::performance_mode.name(), "LATENCY"}}); + +.. tab:: Python API + + .. code-block:: python + + from openvino.runtime import Core + + core = Core() + + # Read a network in IR, PaddlePaddle, or ONNX format: + model = core.read_model(model_path) + + # Load a network to AUTO with Performance Hints enabled: + # To use the “throughput” mode: + compiled_model = core.compile_model(model=model, device_name="AUTO:CPU,GPU", config={"PERFORMANCE_HINT":"THROUGHPUT"}) + + # or the “latency” mode: + compiled_model = core.compile_model(model=model, device_name="AUTO:CPU,GPU", config={"PERFORMANCE_HINT":"LATENCY"}) +@endsphinxdirective + +### ov::hint::model_priority +The property enables you to control the priorities of networks in the Auto-Device plugin. A high-priority network will be loaded to a supported high-priority device. A lower-priority network will not be loaded to a device that is occupied by a higher-priority network. + +@sphinxdirective +.. tab:: C++ API + + .. 
code-block:: cpp + + // Example 1 + // Compile and load networks: + ov::CompiledModel compiled_model0 = core.compile_model(model, "AUTO:GPU,MYRIAD,CPU", {{ov::hint::model_priority.name(), "HIGH"}}); + ov::CompiledModel compiled_model1 = core.compile_model(model, "AUTO:GPU,MYRIAD,CPU", {{ov::hint::model_priority.name(), "MEDIUM"}}); + ov::CompiledModel compiled_model2 = core.compile_model(model, "AUTO:GPU,MYRIAD,CPU", {{ov::hint::model_priority.name(), "LOW"}}); + + /************ + Assume that all the devices (CPU, GPU, and MYRIAD) can support all the networks. + Result: compiled_model0 will use GPU, compiled_model1 will use MYRIAD, compiled_model2 will use CPU. + ************/ + + // Example 2 + // Compile and load networks: + ov::CompiledModel compiled_model3 = core.compile_model(model, "AUTO:GPU,MYRIAD,CPU", {{ov::hint::model_priority.name(), "LOW"}}); + ov::CompiledModel compiled_model4 = core.compile_model(model, "AUTO:GPU,MYRIAD,CPU", {{ov::hint::model_priority.name(), "MEDIUM"}}); + ov::CompiledModel compiled_model5 = core.compile_model(model, "AUTO:GPU,MYRIAD,CPU", {{ov::hint::model_priority.name(), "LOW"}}); + + /************ + Assume that all the devices (CPU, GPU, and MYRIAD) can support all the networks. + Result: compiled_model3 will use GPU, compiled_model4 will use GPU, compiled_model5 will use MYRIAD. + ************/ + +.. tab:: Python API + + .. code-block:: python + + # Example 1 + # Compile and load networks: + compiled_model0 = core.compile_model(model=model, device_name="AUTO:CPU,GPU,MYRIAD", config={"AUTO_NETWORK_PRIORITY":"0"}) + compiled_model1 = core.compile_model(model=model, device_name="AUTO:CPU,GPU,MYRIAD", config={"AUTO_NETWORK_PRIORITY":"1"}) + compiled_model2 = core.compile_model(model=model, device_name="AUTO:CPU,GPU,MYRIAD", config={"AUTO_NETWORK_PRIORITY":"2"}) + + # Assume that all the devices (CPU, GPU, and MYRIAD) can support all the networks. 
# Result: compiled_model0 will use GPU, compiled_model1 will use MYRIAD, compiled_model2 will use CPU. + + # Example 2 + # Compile and load networks: + compiled_model0 = core.compile_model(model=model, device_name="AUTO:CPU,GPU,MYRIAD", config={"AUTO_NETWORK_PRIORITY":"2"}) + compiled_model1 = core.compile_model(model=model, device_name="AUTO:CPU,GPU,MYRIAD", config={"AUTO_NETWORK_PRIORITY":"1"}) + compiled_model2 = core.compile_model(model=model, device_name="AUTO:CPU,GPU,MYRIAD", config={"AUTO_NETWORK_PRIORITY":"2"}) + + # Assume that all the devices (CPU, GPU, and MYRIAD) can support all the networks. + # Result: compiled_model0 will use GPU, compiled_model1 will use GPU, compiled_model2 will use MYRIAD. +@endsphinxdirective + +## Configuring Individual Devices and Creating the Auto-Device plugin on Top +Although the methods described above are currently the preferred way to execute inference with AUTO, the following steps can also be used as an alternative. It is currently available as a legacy feature and is used if the device candidate list includes VPUX or Myriad (devices incapable of utilizing the Performance Hints option). + +@sphinxdirective +.. tab:: C++ API + + .. code-block:: cpp + + ov::Core core; + + // Read a network in IR, PaddlePaddle, or ONNX format + std::shared_ptr<ov::Model> model = core.read_model("sample.xml"); + + // Configure the VPUX and the Myriad devices separately and load the network to the Auto-Device plugin + // set VPUX config + core.set_property("VPUX", {}); + + // set MYRIAD config + core.set_property("MYRIAD", {}); + ov::CompiledModel compiled_model = core.compile_model(model, "AUTO");
code-block:: python + + from openvino.runtime import Core + + core = Core() + + # Read a network in IR, PaddlePaddle, or ONNX format: + model = core.read_model(model_path) + + # Configure the VPUX and the Myriad devices separately and load the network to the Auto-Device plugin: + core.set_config(config=vpux_config, device_name="VPUX") + core.set_config (config=vpux_config, device_name="MYRIAD") + compiled_model = core.compile_model(model=model) + + # Alternatively, you can combine the individual device settings into one configuration and load the network. + # The AUTO plugin will parse and apply the settings to the right devices. + # The 'device_name' of "AUTO:VPUX,MYRIAD" will configure auto-device to use devices. + compiled_model = core.compile_model(model=model, device_name=device_name, config=full_config) + + # To query the optimization capabilities: + device_cap = core.get_metric("CPU", "OPTIMIZATION_CAPABILITIES") +@endsphinxdirective + + +## Using AUTO with OpenVINO™ Samples and the Benchmark App +To see how the Auto-Device plugin is used in practice and test its performance, take a look at OpenVINO™ samples. All samples supporting the "-d" command-line option (which stands for "device") will accept the plugin out-of-the-box. The Benchmark Application will be a perfect place to start – it presents the optimal performance of the plugin without the need for additional settings, like the number of requests or CPU threads. To evaluate the AUTO performance, you can use the following commands: + +For unlimited device choice: +@sphinxdirective +.. code-block:: sh + + ./benchmark_app –d AUTO –m -i -niter 1000 +@endsphinxdirective + +For limited device choice: +@sphinxdirective +.. code-block:: sh + + ./benchmark_app –d AUTO:CPU,GPU,MYRIAD –m -i -niter 1000 +@endsphinxdirective + +For more information, refer to the [C++](../../samples/cpp/benchmark_app/README.md) or [Python](../../tools/benchmark_tool/README.md) version instructions. + +@sphinxdirective +.. 
note:: + + The default CPU stream is 1 if using “-d AUTO”. + + You can use the FP16 IR to work with auto-device. + + No demos are yet fully optimized for AUTO, by means of selecting the most suitable device, using the GPU streams/throttling, and so on. +@endsphinxdirective diff --git a/docs/OV_Runtime_UG/automatic_batching.md b/docs/OV_Runtime_UG/automatic_batching.md new file mode 100644 index 00000000000..d21fe94b61e --- /dev/null +++ b/docs/OV_Runtime_UG/automatic_batching.md @@ -0,0 +1,122 @@ +# Automatic Batching {#openvino_docs_OV_UG_Automatic_Batching} + +## (Automatic) Batching Execution + +The Automatic-Batching is a preview of the new functionality in the OpenVINO™ toolkit. It performs on-the-fly automatic batching (i.e. grouping inference requests together) to improve device utilization, with no programming effort from the user. +Inputs gathering and outputs scattering from the individual inference requests required for the batch happen transparently, without affecting the application code. + +The feature primarily targets existing code written for inferencing many requests (each instance with the batch size 1). To obtain corresponding performance improvements, the application must be *running many inference requests simultaneously*. +As explained below, the auto-batching functionality can be also used via a special *virtual* device. + +Batching is a straightforward way of leveraging the GPU compute power and saving on communication overheads. The automatic batching is _implicitly_ triggered on the GPU when the `ov::hint::PerformanceMode::THROUGHPUT` is specified for the `ov::hint::performance_mode` property for the compile_model or set_property calls. +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_auto_batching.cpp + :language: cpp + :fragment: [compile_model] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_auto_batching.py + :language: python + :fragment: [compile_model] + +@endsphinxdirective +> **NOTE**: You can disable the Auto-Batching (for example, for the GPU device) from being triggered by the `ov::hint::PerformanceMode::THROUGHPUT`. To do that, pass the `ov::hint::allow_auto_batching` set to **false** in addition to the `ov::hint::performance_mode`: +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_auto_batching.cpp + :language: cpp + :fragment: [compile_model_no_auto_batching] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_auto_batching.py + :language: python + :fragment: [compile_model_no_auto_batching] + +@endsphinxdirective + + +Alternatively, to enable the Auto-Batching in the legacy apps not akin to the notion of the performance hints, you may need to use the **explicit** device notion, such as 'BATCH:GPU'. In both cases (the *throughput* hint or explicit BATCH device), the optimal batch size selection happens automatically (the implementation queries the `ov::optimal_batch_size` property from the device, passing the model's graph as the parameter). The actual value depends on the model and device specifics, for example, on-device memory for the dGPUs. +Auto-Batching support is not limited to the GPUs, but if a device does not support the `ov::optimal_batch_size` yet, it can work with the auto-batching only when specifying an explicit batch size, for example, "BATCH:(16)". + +This _automatic batch size selection_ assumes that the application queries the `ov::optimal_number_of_infer_requests` to create and run the returned number of requests simultaneously: +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_auto_batching.cpp + :language: cpp + :fragment: [query_optimal_num_requests] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_auto_batching.py + :language: python + :fragment: [query_optimal_num_requests] + +@endsphinxdirective +If not enough inputs were collected, the `timeout` value makes the transparent execution fall back to the execution of individual requests. Configuration-wise, this is the AUTO_BATCH_TIMEOUT property. +The timeout, which adds itself to the execution time of the requests, heavily penalizes the performance. To avoid this, in cases when your parallel slack is bounded, give the OpenVINO an additional hint. + +For example, the application processes only 4 video streams, so there is no need to use a batch larger than 4. The most future-proof way to communicate the limitations on the parallelism is to equip the performance hint with the optional `ov::hint::num_requests` configuration key set to 4. For the GPU this will limit the batch size, for the CPU - the number of inference streams, so each device uses the `ov::hint::num_requests` while converting the hint to the actual device configuration options: +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_auto_batching.cpp + :language: cpp + :fragment: [hint_num_requests] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_auto_batching.py + :language: python + :fragment: [hint_num_requests] + +@endsphinxdirective + +For the *explicit* usage, you can limit the batch size using "BATCH:GPU(4)", where 4 is the number of requests running in parallel. + +### Other Performance Considerations + +To achieve the best performance with the Automatic Batching, the application should: + - Operate the number of inference requests that represents the multiple of the batch size. In the above example, for batch size 4, the application should operate 4, 8, 12, 16, etc. requests. + - Use the requests, grouped by the batch size, together. For example, the first 4 requests are inferred, while the second group of the requests is being populated. 
Essentially, the Automatic Batching shifts the asynchronousity from the individual requests to the groups of requests that constitute the batches. + - Balance the 'timeout' value vs the batch size. For example, in many cases having a smaller timeout value/batch size may yield better performance than large batch size, but with the timeout value that is not large enough to accommodate the full number of the required requests. + - Carefully apply the auto-batching to the pipelines. For example for the conventional video-sources->detection->classification flow, it is the most benefical to do auto-batching over the inputs to the detection stage. Whereas the resulting number of detections is usually fluent, which makes the auto-batching less applicable for the classification stage. + +The following are limitations of the current implementations: + - Although less critical for the throughput-oriented scenarios, the load-time with auto-batching increases by almost 2x. + - Certain networks are not safely reshape-able by the "batching" dimension (specified as 'N' in the layouts terms). Also, if the batching dimension is not zero-th, the auto-batching is not triggered _implicitly_ by the throughput hint. + - The _explicit_ notion, for example, "BATCH:GPU", uses the relaxed dimensions tracking, often making the auto-batching possible. For example, this trick unlocks most **detection networks**. + - - When *forcing* the auto-batching via the explicit device notion, make sure to validate the results for correctness. + - Performance improvements happen at the cost of the memory footprint growth, yet the auto-batching queries the available memory (especially for the dGPUs) and limits the selected batch size accordingly. + + +### Configuring the Automatic Batching +Following the OpenVINO convention for devices names, the *batching* device is named *BATCH*. 
The configuration options are as follows: + +| Parameter name | Parameter description | Default | Examples | +| :--- | :--- | :--- |:-----------------------------------------------------------------------------| +| "AUTO_BATCH_DEVICE" | Device name to apply the automatic batching and optional batch size in brackets | N/A | "BATCH:GPU" which triggers the automatic batch size selection. Another example is the device name (to apply the batching) with directly specified batch size "BATCH:GPU(4)" | +| "AUTO_BATCH_TIMEOUT" | timeout value, in ms | 1000 | you can reduce the timeout value (to avoid performance penalty when the data arrives too non-evenly) e.g. pass the "100", or in contrast make it large enough e.g. to accommodate inputs preparation (e.g. when it is serial process) | + +### Testing Automatic Batching Performance with the Benchmark_App +The `benchmark_app`, that exists in both [C++](../../samples/cpp/benchmark_app/README.md) and [Python](../../tools/benchmark_tool/README.md) versions, is the best way to evaluate the performance of the Automatic Batching: + - The most straighforward way is performance hints: +- - benchmark_app **-hint tput** -d GPU -m 'path to your favorite model' + - Overriding the strict rules of implicit reshaping by the batch dimension via the explicit device notion: +- - benchmark_app **-hint none -d BATCH:GPU** -m 'path to your favorite model' + - Finally, overriding the automatically-deduced batch size as well: +- - $benchmark_app -hint none -d **BATCH:GPU(16)** -m 'path to your favorite model' + +The last example is also applicable to the CPU or any other device that generally supports the batched execution. 
+ +### See Also +[Supported Devices](supported_plugins/Supported_Devices.md) diff --git a/docs/OV_Runtime_UG/hetero_execution.md b/docs/OV_Runtime_UG/hetero_execution.md new file mode 100644 index 00000000000..2591232b6d7 --- /dev/null +++ b/docs/OV_Runtime_UG/hetero_execution.md @@ -0,0 +1,167 @@ +# Heterogeneous execution {#openvino_docs_OV_UG_Hetero_execution} + +Heterogeneous execution enables executing inference of one model on several devices. Its purpose is to: + +* Utilize the power of accelerators to process the heaviest parts of the model and to execute unsupported operations on fallback devices, like the CPU. +* Utilize all available hardware more efficiently during one inference. + +Execution via the heterogeneous mode can be divided into two independent steps: + +1. Setting hardware affinity to operations (`ov::Core::query_model` is used internally by the Hetero device) +2. Compiling a model to the Heterogeneous device assumes splitting the model to parts, compiling them on the specified devices (via `ov::device::priorities`), and executing them in the Heterogeneous mode. The model is split to subgraphs in accordance with the affinities, where a set of connected operations with the same affinity is to be a dedicated subgraph. Each subgraph is compiled on a dedicated device and multiple `ov::CompiledModel` objects are made, which are connected via automatically allocated intermediate tensors. + +These two steps are not interconnected and affinities can be set in one of two ways, used separately or in combination (as described below): in the `manual` or the `automatic` mode. 
+ +### Defining and Configuring the Hetero Device + +Following the OpenVINO™ naming convention, the Hetero execution plugin is assigned the label of `"HETERO".` It may be defined with no additional parameters, resulting in defaults being used, or configured further with the following setup options: + +@sphinxdirective ++-------------------------------+--------------------------------------------+-----------------------------------------------------------+ +| Parameter Name & C++ property | Property values | Description | ++===============================+============================================+===========================================================+ +| | "MULTI_DEVICE_PRIORITIES" | | HETERO: | | Lists the devices available for selection. | +| | `ov::device::priorities` | | comma-separated, no spaces | | The device sequence will be taken as priority | +| | | | | | from high to low. | ++-------------------------------+--------------------------------------------+-----------------------------------------------------------+ +@endsphinxdirective + +### Manual and Automatic modes for assigning affinities + +#### The Manual Mode +It assumes setting affinities explicitly for all operations in the model using `ov::Node::get_rt_info` with the `"affinity"` key. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_hetero.cpp + :language: cpp + :fragment: [set_manual_affinities] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_hetero.py + :language: python + :fragment: [set_manual_affinities] + +@endsphinxdirective + + + +#### The Automatic Mode +It decides automatically which operation is assigned to which device according to the support from dedicated devices (`GPU`, `CPU`, `MYRIAD`, etc.) and query model step is called implicitly by Hetero device during model compilation. 
+ +The automatic mode causes "greedy" behavior and assigns all operations that can be executed on a given device to it, according to the priorities you specify (for example, `ov::device::priorities("GPU,CPU")`). +It does not take into account device peculiarities such as the inability to infer certain operations without other special operations placed before or after that layer. If the device plugin does not support the subgraph topology constructed by the HETERO device, then you should set affinity manually. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_hetero.cpp + :language: cpp + :fragment: [compile_model] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_hetero.py + :language: python + :fragment: [compile_model] + +@endsphinxdirective + +#### Using Manual and Automatic Modes in Combination +In some cases you may need to consider manually adjusting affinities which were set automatically. It usually serves minimizing the number of total subgraphs to optimize memory transfers. To do it, you need to "fix" the automatically assigned affinities like so: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_hetero.cpp + :language: cpp + :fragment: [fix_automatic_affinities] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_hetero.py + :language: python + :fragment: [fix_automatic_affinities] + +@endsphinxdirective + +Importantly, the automatic mode will not work if any operation in a model has its `"affinity"` already initialized. + +> **NOTE**: `ov::Core::query_model` does not depend on affinities set by a user. Instead, it queries for an operation support based on device capabilities. + +### Configure fallback devices +If you want different devices in Hetero execution to have different device-specific configuration options, you can use the special helper property `ov::device::properties`: + +@sphinxdirective + +.. tab:: C++ + + .. 
doxygensnippet:: docs/snippets/ov_hetero.cpp + :language: cpp + :fragment: [configure_fallback_devices] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_hetero.py + :language: python + :fragment: [configure_fallback_devices] + +@endsphinxdirective + +In the example above, the `GPU` device is configured to enable profiling data and uses the default execution precision, while `CPU` has the configuration property to perform inference in `fp32`. + +### Handling of Difficult Topologies + +Some topologies are not friendly to heterogeneous execution on some devices, even to the point of being unable to execute. +For example, models having activation operations that are not supported on the primary device are split by Hetero into multiple sets of subgraphs which leads to suboptimal execution. +If transmitting data from one subgraph to another part of the model in the heterogeneous mode takes more time than under normal execution, heterogeneous execution may be unsubstantiated. +In such cases, you can define the heaviest part manually and set the affinity to avoid sending data back and forth many times during one inference. + +### Analyzing Performance of Heterogeneous Execution +After enabling the OPENVINO_HETERO_VISUALIZE environment variable, you can dump GraphViz `.dot` files with annotations of operations per devices. + +The Heterogeneous execution mode can generate two files: + +* `hetero_affinity_.dot` - annotation of affinities per operation. +* `hetero_subgraphs_.dot` - annotation of affinities per graph. + +You can use the GraphViz utility or a file converter to view the images. On the Ubuntu operating system, you can use xdot: + +* `sudo apt-get install xdot` +* `xdot hetero_subgraphs.dot` + +You can use performance data (in sample applications, it is the option `-pc`) to get the performance data on each subgraph. + +Here is an example of the output for Googlenet v1 running on HDDL with fallback to CPU: + +``` +subgraph1: 1. 
input preprocessing (mean data/HDDL):EXECUTED layerType: realTime: 129 cpu: 129 execType: +subgraph1: 2. input transfer to DDR:EXECUTED layerType: realTime: 201 cpu: 0 execType: +subgraph1: 3. HDDL execute time:EXECUTED layerType: realTime: 3808 cpu: 0 execType: +subgraph1: 4. output transfer from DDR:EXECUTED layerType: realTime: 55 cpu: 0 execType: +subgraph1: 5. HDDL output postprocessing:EXECUTED layerType: realTime: 7 cpu: 7 execType: +subgraph1: 6. copy to IE blob:EXECUTED layerType: realTime: 2 cpu: 2 execType: +subgraph2: out_prob: NOT_RUN layerType: Output realTime: 0 cpu: 0 execType: unknown +subgraph2: prob: EXECUTED layerType: SoftMax realTime: 10 cpu: 10 execType: ref +Total time: 4212 microseconds +``` +### Sample Usage + +OpenVINO™ sample programs can use the Heterogeneous execution used with the `-d` option: + +```sh +./hello_classification /squeezenet1.1.xml /picture.jpg HETERO:GPU,CPU +``` +where: +- `HETERO` stands for the Heterogeneous execution +- `GPU,CPU` points to a fallback policy with the priority on GPU and fallback to CPU + +You can also point to more than two devices: `-d HETERO:MYRIAD,GPU,CPU` + +### See Also +[Supported Devices](supported_plugins/Supported_Devices.md) diff --git a/docs/OV_Runtime_UG/img/preprocess_not_fit.png b/docs/OV_Runtime_UG/img/preprocess_not_fit.png new file mode 100644 index 00000000000..32a43476707 --- /dev/null +++ b/docs/OV_Runtime_UG/img/preprocess_not_fit.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8fed5e153636e3e556e000e3e5fc48b9da8f5a1272490550066d647d306ec24f +size 81575 diff --git a/docs/OV_Runtime_UG/integrate_with_your_application.md b/docs/OV_Runtime_UG/integrate_with_your_application.md new file mode 100644 index 00000000000..0cc9e630f3c --- /dev/null +++ b/docs/OV_Runtime_UG/integrate_with_your_application.md @@ -0,0 +1,257 @@ +# Integrate OpenVINO™ with Your Application {#openvino_docs_Integrate_OV_with_your_application} + +@sphinxdirective + +.. 
toctree:: + :maxdepth: 1 + :hidden: + + openvino_docs_OV_Runtime_UG_Model_Representation + openvino_docs_OV_Runtime_UG_Infer_request + +@endsphinxdirective + +> **NOTE**: Before start using OpenVINO™ Runtime, make sure you set all environment variables during the installation. If you did not, follow the instructions from the _Set the Environment Variables_ section in the installation guides: +> * [For Windows* 10](../install_guides/installing-openvino-windows.md) +> * [For Linux*](../install_guides/installing-openvino-linux.md) +> * [For macOS*](../install_guides/installing-openvino-macos.md) +> * To build an open source version, use the [OpenVINO™ Runtime Build Instructions](https://github.com/openvinotoolkit/openvino/wiki/BuildingCode). + +## Use OpenVINO™ Runtime API to Implement Inference Pipeline + +This section provides step-by-step instructions to implement a typical inference pipeline with the OpenVINO™ Runtime C++ API: + +![ie_api_use_cpp] + +### Step 1. Create OpenVINO™ Runtime Core + +Include next files to work with OpenVINO™ Runtime: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [include] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [import] + +@endsphinxdirective + +Use the following code to create OpenVINO™ Core to manage available devices and read model objects: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part1] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part1] + +@endsphinxdirective + +### Step 2. Compile the Model + +`ov::CompiledModel` class represents a device specific compiled model. `ov::CompiledModel` allows you to get information inputs or output ports by a tensor name or index. + +Compile the model for a specific device using `ov::Core::compile_model()`: + +@sphinxdirective + +.. 
tab:: C++ + + .. tab:: IR + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part2_1] + + .. tab:: ONNX + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part2_2] + + .. tab:: PaddlePaddle + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part2_3] + + .. tab:: ov::Model + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part2_4] + +.. tab:: Python + + .. tab:: IR + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part2_1] + + .. tab:: ONNX + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part2_2] + + .. tab:: PaddlePaddle + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part2_3] + + .. tab:: ov::Model + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part2_4] + +@endsphinxdirective + +The `ov::Model` object represents any models inside the OpenVINO™ Runtime. +For more details please read article about [OpenVINO™ Model representation](model_representation.md). + +The code above creates a compiled model associated with a single hardware device from the model object. +It is possible to create as many compiled models as needed and use them simultaneously (up to the limitation of the hardware resources). +To learn how to change the device configuration, read the [Query device properties](./supported_plugins/config_properties.md) article. + +### Step 3. Create an Inference Request + +`ov::InferRequest` class provides methods for model inference in OpenVINO™ Runtime. Create an infer request using the following code (see [InferRequest detailed documentation](./ov_infer_request.md) for more details): + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part3] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part3] + +@endsphinxdirective + +### Step 4. Set Inputs + +You can use external memory to create `ov::Tensor` and use the `ov::InferRequest::set_input_tensor` method to put this tensor on the device: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part4] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part4] + +@endsphinxdirective + +### Step 5. Start Inference + +OpenVINO™ Runtime supports inference in either synchronous or asynchronous mode. Using the Async API can improve application's overall frame-rate, because rather than wait for inference to complete, the app can keep working on the host, while the accelerator is busy. You can use `ov::InferRequest::start_async` to start model inference in the asynchronous mode and call `ov::InferRequest::wait` to wait for the inference results: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part5] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part5] + +@endsphinxdirective + +This section demonstrates a simple pipeline, to get more information about other ways to perform inference, read the dedicated ["Run inference" section](./ov_infer_request.md). + +### Step 6. Process the Inference Results + +Go over the output tensors and process the inference results. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/src/main.cpp + :language: cpp + :fragment: [part6] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/src/main.py + :language: python + :fragment: [part6] + +@endsphinxdirective + +## Link and Build Your C++ Application with OpenVINO™ Runtime + +The example uses CMake for project configuration. + +1. 
**Create a structure** for the project: + ``` sh + project/ + ├── CMakeLists.txt - CMake file to build + ├── ... - Additional folders like includes/ + └── src/ - source folder + └── main.cpp + build/ - build directory + ... + ``` + +2. **Include OpenVINO™ Runtime libraries** in `project/CMakeLists.txt` + + @snippet snippets/CMakeLists.txt cmake:integration_example + +To build your project using CMake with the default build tools currently available on your machine, execute the following commands: + +> **NOTE**: Make sure you set environment variables first by running `/setupvars.sh` (or `setupvars.bat` for Windows). Otherwise the `OpenVINO_DIR` variable won't be configured properly to pass `find_package` calls. + +```sh +cd build/ +cmake ../project +cmake --build . +``` +It's allowed to specify additional build options (e.g. to build CMake project on Windows with a specific build tools). Please refer to the [CMake page](https://cmake.org/cmake/help/latest/manual/cmake.1.html#manual:cmake(1)) for details. + +## Run Your Application + +Congratulations, you have made your first application with OpenVINO™ toolkit, now you may run it. + +## See also + + - [OpenVINO™ Runtime Preprocessing](./preprocessing_overview.md) + - [Using Encrypted Models with OpenVINO™](./protecting_model_guide.md) + +[ie_api_flow_cpp]: img/BASIC_IE_API_workflow_Cpp.svg +[ie_api_use_cpp]: img/IMPLEMENT_PIPELINE_with_API_C.svg +[ie_api_flow_python]: img/BASIC_IE_API_workflow_Python.svg +[ie_api_use_python]: img/IMPLEMENT_PIPELINE_with_API_Python.svg diff --git a/docs/OV_Runtime_UG/layout_overview.md b/docs/OV_Runtime_UG/layout_overview.md new file mode 100644 index 00000000000..c164fb8a1be --- /dev/null +++ b/docs/OV_Runtime_UG/layout_overview.md @@ -0,0 +1,154 @@ +# Layout API overview {#openvino_docs_OV_Runtime_UG_Layout_Overview} + +## Introduction + +In few words, with layout `NCHW` it is easier to understand what model's shape `{8, 3, 224, 224}` means. 
Without layout it is just a 4-dimensional tensor. + + +The concept of layout helps you (and your application) to understand what each particular dimension of an input/output tensor means. For example, if your input has shape `{1, 3, 720, 1280}` and layout "NCHW" - it is clear that `N(batch) = 1`, `C(channels) = 3`, `H(height) = 720` and `W(width) = 1280`. Without layout information `{1, 3, 720, 1280}` doesn't give your application any idea what these numbers mean and how to resize the input image to fit the model's expectations. + + +Reasons why you may want to care about input/output layout: + - Perform model modification: + - Apply [preprocessing](./preprocessing_overview.md) steps, like subtract means, divide by scales, resize image, convert RGB<->BGR + - Set/get batch for a model + - Same operations, used during model conversion phase, see [Model Optimizer Embedding Preprocessing Computation](../MO_DG/prepare_model/Additional_Optimizations.md) + - Improve readability of a model's input and output + +## Layout syntax + +### Short +The easiest way is to fully specify each dimension with one alphabetical letter + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_layout.cpp + :language: cpp + :fragment: [ov:layout:simple] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_layout.py + :language: python + :fragment: [ov:layout:simple] + +@endsphinxdirective + +This assigns 'N' to the first dimension, 'C' to the second, 'H' to the 3rd and 'W' to the 4th + +### Advanced +Advanced syntax allows assigning a word to a dimension. To do this, wrap the layout with square brackets `[]` and specify each name separated by comma `,` + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_layout.cpp + :language: cpp + :fragment: [ov:layout:complex] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_layout.py + :language: python + :fragment: [ov:layout:complex] + +@endsphinxdirective + + +### Partially defined layout +If some dimension is not important, its name can be set to `?` + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_layout.cpp + :language: cpp + :fragment: [ov:layout:partially_defined] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_layout.py + :language: python + :fragment: [ov:layout:partially_defined] + +@endsphinxdirective + + +### Dynamic layout +If the number of dimensions is not important, an ellipsis `...` can be used to specify a variadic number of dimensions. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_layout.cpp + :language: cpp + :fragment: [ov:layout:dynamic] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_layout.py + :language: python + :fragment: [ov:layout:dynamic] + +@endsphinxdirective + +### Predefined names + +Layout has some predefined names for dimensions widely used in computer vision: +- N/Batch - batch size +- C/Channels - channels dimension +- D/Depth - depth +- H/Height - height +- W/Width - width + +These names are used in the [PreProcessing API](./preprocessing_overview.md) and there is a set of helper functions to get the appropriate dimension index from a layout + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_layout.cpp + :language: cpp + :fragment: [ov:layout:predefined] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_layout.py + :language: python + :fragment: [ov:layout:predefined] + +@endsphinxdirective + + +### Equality + +Layout names are case-insensitive, which means that ```Layout("NCHW") == Layout("nChW") == Layout("[N,c,H,w]")``` + +### Dump layout + +Layout can be converted to a string in advanced syntax format. This can be useful for debugging and serialization purposes + +@sphinxdirective + +.. tab:: C++ + + .. 
doxygensnippet:: docs/snippets/ov_layout.cpp + :language: cpp + :fragment: [ov:layout:dump] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_layout.py + :language: python + :fragment: [ov:layout:dump] + +@endsphinxdirective + +## See also + +* ov::Layout C++ class documentation diff --git a/docs/OV_Runtime_UG/migration_ov_2_0/common_inference_pipeline.md b/docs/OV_Runtime_UG/migration_ov_2_0/common_inference_pipeline.md new file mode 100644 index 00000000000..f349c67d570 --- /dev/null +++ b/docs/OV_Runtime_UG/migration_ov_2_0/common_inference_pipeline.md @@ -0,0 +1,228 @@ +# Inference Pipeline {#openvino_2_0_inference_pipeline} + +Usually to inference model with the OpenVINO™ Runtime an user needs to do the following steps in the application pipeline: +- 1. Create Core object +- 2. Read model from the disk + - 2.1. (Optional) Model preprocessing +- 3. Load the model to the device +- 4. Create an inference request +- 5. Fill input tensors with data +- 6. Start inference +- 7. Process the inference results + +Code snippets below cover these steps and show how application code should be changed for migration to OpenVINO™ Runtime 2.0. + +## 1. Create Core + +Inference Engine API: + +@snippet docs/snippets/ie_common.cpp ie:create_core + +OpenVINO™ Runtime API 2.0: + +@snippet docs/snippets/ov_common.cpp ov_api_2_0:create_core + +## 2. Read model from the disk + +Inference Engine API: + +@snippet docs/snippets/ie_common.cpp ie:read_model + +OpenVINO™ Runtime API 2.0: + +@snippet docs/snippets/ov_common.cpp ov_api_2_0:read_model + +Read model has the same structure as in the example from [Model Creation](./graph_construction.md) migration guide. + +Note, you can combine read and compile model stages into a single call `ov::Core::compile_model(filename, devicename)`. + +### 2.1 (Optional) Model preprocessing + +When application's input data doesn't perfectly match with model's input format, preprocessing steps may need to be added. 
+See detailed guide [how to migrate preprocessing in OpenVINO Runtime API 2.0](./preprocessing.md) + +## 3. Load the Model to the Device + +Inference Engine API: + +@snippet docs/snippets/ie_common.cpp ie:compile_model + +OpenVINO™ Runtime API 2.0: + +@snippet docs/snippets/ov_common.cpp ov_api_2_0:compile_model + +If you need to configure OpenVINO Runtime devices with additional configuration parameters, please, refer to the migration [Configure devices](./configure_devices.md) guide. + +## 4. Create an Inference Request + +Inference Engine API: + +@snippet docs/snippets/ie_common.cpp ie:create_infer_request + +OpenVINO™ Runtime API 2.0: + +@snippet docs/snippets/ov_common.cpp ov_api_2_0:create_infer_request + +## 5. Fill input tensors + +Inference Engine API fills inputs as `I32` precision (**not** aligned with the original model): + +@sphinxdirective + +.. tab:: IR v10 + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:get_input_tensor] + +.. tab:: IR v11 + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:get_input_tensor] + +.. tab:: ONNX + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:get_input_tensor] + +.. tab:: Model created in code + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:get_input_tensor] + +@endsphinxdirective + +OpenVINO™ Runtime API 2.0 fills inputs as `I64` precision (aligned with the original model): + +@sphinxdirective + +.. tab:: IR v10 + + .. doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:get_input_tensor_v10] + +.. tab:: IR v11 + + .. doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:get_input_tensor_aligned] + +.. tab:: ONNX + + .. doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:get_input_tensor_aligned] + +.. tab:: Model created in code + + .. 
doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:get_input_tensor_aligned] + +@endsphinxdirective + +## 6. Start Inference + +Inference Engine API: + +@sphinxdirective + +.. tab:: Sync + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:inference] + +.. tab:: Async + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:start_async_and_wait] + +@endsphinxdirective + +OpenVINO™ Runtime API 2.0: + +@sphinxdirective + +.. tab:: Sync + + .. doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:inference] + +.. tab:: Async + + .. doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:start_async_and_wait] + +@endsphinxdirective + +## 7. Process the Inference Results + +Inference Engine API processes outputs as `I32` precision (**not** aligned with the original model): + +@sphinxdirective + +.. tab:: IR v10 + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:get_output_tensor] + +.. tab:: IR v11 + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:get_output_tensor] + +.. tab:: ONNX + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:get_output_tensor] + +.. tab:: Model created in code + + .. doxygensnippet:: docs/snippets/ie_common.cpp + :language: cpp + :fragment: [ie:get_output_tensor] + +@endsphinxdirective + +OpenVINO™ Runtime API 2.0 processes outputs: +- For IR v10 as `I32` precision (**not** aligned with the original model) to match **old** behavior +- For IR v11, ONNX, ov::Model, Paddle as `I64` precision (aligned with the original model) to match **new** behavior + +@sphinxdirective + +.. tab:: IR v10 + + .. doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:get_output_tensor_v10] + +.. tab:: IR v11 + + .. 
doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:get_output_tensor_aligned] + +.. tab:: ONNX + + .. doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:get_output_tensor_aligned] + +.. tab:: Model created in code + + .. doxygensnippet:: docs/snippets/ov_common.cpp + :language: cpp + :fragment: [ov_api_2_0:get_output_tensor_aligned] + +@endsphinxdirective \ No newline at end of file diff --git a/docs/OV_Runtime_UG/migration_ov_2_0/configure_devices.md b/docs/OV_Runtime_UG/migration_ov_2_0/configure_devices.md new file mode 100644 index 00000000000..a730a15684f --- /dev/null +++ b/docs/OV_Runtime_UG/migration_ov_2_0/configure_devices.md @@ -0,0 +1,179 @@ +# Configure devices {#openvino_2_0_configure_devices} + +### Introduction + +Inference Engine API provides an [ability to configure devices](https://docs.openvino.ai/2021.4/openvino_docs_IE_DG_InferenceEngine_QueryAPI.html) via configuration keys and [get device specific metrics](https://docs.openvino.ai/2021.4/openvino_docs_IE_DG_InferenceEngine_QueryAPI.html#getmetric). The values taken from `InferenceEngine::Core::GetConfig` are requested by its string name, while return type is `InferenceEngine::Parameter` and users don't know what is the actual type is stored in this parameter. + +OpenVINO Runtime API 2.0 solves these issues by introducing [properties](../supported_plugins/config_properties.md), which unify metrics and configuration key concepts, but the main advantage of properties - they have C++ type: + +``` +static constexpr Property full_name{"FULL_DEVICE_NAME"}; +``` + +And the property can be requested from an inference device as: + +@snippet ov_properties_migration.cpp core_get_ro_property + +The snippets below show how to migrate from Inference Engine device configuration to OpenVINO Runtime API 2.0 steps. + +### Set configuration values + +Inference Engine API: + +@sphinxdirective + +.. tab:: Devices + + .. 
doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [core_set_config] + +.. tab:: Model Loading + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [core_load_network] + +.. tab:: Execution + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [executable_network_set_config] + +@endsphinxdirective + +OpenVINO Runtime API 2.0: + +@sphinxdirective + +.. tab:: C++ + + .. tab:: Devices + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [core_set_property] + + .. tab:: Model Loading + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [core_compile_model] + + .. tab:: Execution + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [compiled_model_set_property] + +.. tab:: Python + + .. tab:: Devices + + .. doxygensnippet:: docs/snippets/ov_properties_migration.py + :language: python + :fragment: [core_set_property] + + .. tab:: Model Loading + + .. doxygensnippet:: docs/snippets/ov_properties_migration.py + :language: python + :fragment: [core_compile_model] + + .. tab:: Execution + + .. doxygensnippet:: docs/snippets/ov_properties_migration.py + :language: python + :fragment: [compiled_model_set_property] + +@endsphinxdirective + +### Get information + +Inference Engine API: + +@sphinxdirective + +.. tab:: Device configuration + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [core_get_config] + +.. tab:: Device metrics + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [core_get_metric] + +.. tab:: Execution config + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [executable_network_get_config] + +.. tab:: Execution metrics + + .. 
doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [executable_network_get_metric] + +@endsphinxdirective + +OpenVINO Runtime API 2.0: + +@sphinxdirective + +.. tab:: C++ + + .. tab:: Device configuration + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [core_get_rw_property] + + .. tab:: Device metrics + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [core_get_ro_property] + + .. tab:: Execution config + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [compiled_model_get_rw_property] + + .. tab:: Execution metrics + + .. doxygensnippet:: docs/snippets/ov_properties_migration.cpp + :language: cpp + :fragment: [compiled_model_get_ro_property] + +.. tab:: Python + + .. tab:: Device configuration + + .. doxygensnippet:: docs/snippets/ov_properties_migration.py + :language: python + :fragment: [core_get_rw_property] + + .. tab:: Device metrics + + .. doxygensnippet:: docs/snippets/ov_properties_migration.py + :language: python + :fragment: [core_get_ro_property] + + .. tab:: Execution config + + .. doxygensnippet:: docs/snippets/ov_properties_migration.py + :language: python + :fragment: [compiled_model_get_rw_property] + + .. tab:: Execution metrics + + .. doxygensnippet:: docs/snippets/ov_properties_migration.py + :language: python + :fragment: [compiled_model_get_ro_property] + +@endsphinxdirective diff --git a/docs/OV_Runtime_UG/migration_ov_2_0/deployment_migration.md b/docs/OV_Runtime_UG/migration_ov_2_0/deployment_migration.md new file mode 100644 index 00000000000..0eb86abd370 --- /dev/null +++ b/docs/OV_Runtime_UG/migration_ov_2_0/deployment_migration.md @@ -0,0 +1,197 @@ +# Installation & Deployment {#openvino_2_0_deployment} + +"Easy to use" is one of the main concepts for OpenVINO™ API 2.0. 
It includes not only simplifying the migration from frameworks to OpenVINO, but also how OpenVINO is organized, how the development tools are used, and how to develop and deploy OpenVINO-based applications. + +To accomplish that, we have made some changes on the installation and deployment of OpenVINO in the 2022.1 release. This guide will walk you through them. + +## Installer Package Contains OpenVINO™ Runtime Only + +Starting from OpenVINO 2022.1, Model Optimizer, Post-Training Optimization tool and Python-based Development tools such as Open Model Zoo tools are distributed via [PyPI](https://pypi.org/project/openvino-dev/) only, and are no longer included in the OpenVINO installer package. This change has several benefits as it: + +* Simplifies the user experience. In previous versions, the installation and usage of OpenVINO Development Tools differ according to the distribution type (via an OpenVINO installer or PyPI). +* Ensures that dependencies are handled properly via the PIP package manager and support virtual environments of development tools. + +The structure of OpenVINO 2022.1 installer package has been organized as below: + +- The `runtime` folder includes headers, libraries and CMake interfaces. +- The `tools` folder contains [the compile tool](../../../tools/compile_tool/README.md), [deployment manager](../../install_guides/deployment-manager-tool.md) and a set of `requirements.txt` files with links to the corresponding versions of the `openvino-dev` package. +- The `python` folder contains the Python version for OpenVINO Runtime. + +## Installing OpenVINO Development Tools via PyPI + +Since OpenVINO Development Tools is no longer in the installer package, the installation process has changed too. This section describes it through a comparison with previous versions. + +### For Versions Prior to 2022.1 + +In previous versions, OpenVINO Development Tools is a part of main package. 
After the package is installed, to convert models (for example, TensorFlow), you need to install additional dependencies by using the requirements files such as `requirements_tf.txt`, install Post-Training Optimization tool and Accuracy Checker tool via the `setup.py` scripts, and then use the `setupvars` scripts to make the tools available to the following command: + +```sh +$ mo.py -h +``` + +### For 2022.1 and After + +Starting from OpenVINO 2022.1, you can install the development tools from [PyPI](https://pypi.org/project/openvino-dev/) repository only, using the following command (taking TensorFlow as an example): + +```sh +$ python3 -m pip install -r /tools/requirements_tf.txt +``` + +This will install all the development tools and additional necessary components to work with TensorFlow via the `openvino-dev` package (see **Step 4. Install the Package** on the [PyPI page](https://pypi.org/project/openvino-dev/) for parameters of other frameworks). + +Then, the tools can be used by commands like: + +```sh +$ mo -h +$ pot -h +``` + +You don't have to install any other dependencies. For more details on the installation steps, see [Install OpenVINO Development Tools](../../install_guides/installing-model-dev-tools.md). + +## Interface Changes for Building C/C++ Applications + +The new OpenVINO Runtime with API 2.0 has also brought some changes for builiding your C/C++ applications. 
+ +### CMake Interface + +The CMake interface has been changed as below: + +**With Inference Engine of previous versions**: + +```cmake +find_package(InferenceEngine REQUIRED) +find_package(ngraph REQUIRED) +add_executable(ie_ngraph_app main.cpp) +target_link_libraries(ie_ngraph_app PRIVATE ${InferenceEngine_LIBRARIES} ${NGRAPH_LIBRARIES}) +``` + +**With OpenVINO Runtime 2022.1 (API 2.0)**: + +```cmake +find_package(OpenVINO REQUIRED) +add_executable(ov_app main.cpp) +target_link_libraries(ov_app PRIVATE openvino::runtime) + +add_executable(ov_c_app main.c) +target_link_libraries(ov_c_app PRIVATE openvino::runtime::c) +``` + +### Native Interfaces + +To build applications without CMake interface, you can also use MSVC IDE, UNIX makefiles and any other interfaces, which have been changed as below: + +**With Inference Engine of previous versions**: + +@sphinxdirective + +.. tab:: Include dirs + + .. code-block:: sh + + /deployment_tools/inference_engine/include + /deployment_tools/ngraph/include + +.. tab:: Path to libs + + .. code-block:: sh + + /deployment_tools/inference_engine/lib/intel64/Release + /deployment_tools/ngraph/lib/ + +.. tab:: Shared libs + + .. code-block:: sh + + // UNIX systems + inference_engine.so ngraph.so + + // Windows + inference_engine.dll ngraph.dll + +.. tab:: (Windows) .lib files + + .. code-block:: sh + + ngraph.lib + inference_engine.lib + +@endsphinxdirective + +**With OpenVINO Runtime 2022.1 (API 2.0)**: + +@sphinxdirective + +.. tab:: Include dirs + + .. code-block:: sh + + /runtime/include + +.. tab:: Path to libs + + .. code-block:: sh + + /runtime/lib/intel64/Release + +.. tab:: Shared libs + + .. code-block:: sh + + // UNIX systems + openvino.so + + // Windows + openvino.dll + +.. tab:: (Windows) .lib files + + .. code-block:: sh + + openvino.lib + +@endsphinxdirective + +## Clearer Library Structure for Deployment + +OpenVINO 2022.1 has reorganized the libraries to make it easier for deployment. 
In previous versions, to perform deployment steps, you have to use several libraries. Now you can just use `openvino` or `openvino_c` based on your developing language plus necessary plugins to complete your task. For example, `openvino_intel_cpu_plugin` and `openvino_ir_frontend` plugins will enable you to load OpenVINO IRs and perform inference on CPU device. + +Here you can find some detailed comparisons on library structure between OpenVINO 2022.1 and previous versions: + +* A single core library with all the functionalities (`openvino` for C++ Runtime, `openvino_c` for Inference Engine API C interface) is used in 2022.1, instead of the previous core libraries which contain `inference_engine`, `ngraph`, `inference_engine_transformations` and `inference_engine_lp_transformations`. +* The optional `inference_engine_preproc` preprocessing library (if `InferenceEngine::PreProcessInfo::setColorFormat` or `InferenceEngine::PreProcessInfo::setResizeAlgorithm` is used) is renamed as `openvino_gapi_preproc` and deprecated in 2022.1. See more details on [Preprocessing capabilities of OpenVINO API 2.0](preprocessing.md). +* The libraries of plugins are renamed as below: + * `openvino_intel_cpu_plugin` is used for [CPU](../supported_plugins/CPU.md) device instead of `MKLDNNPlugin` in previous versions. + * `openvino_intel_gpu_plugin` is used for [GPU](../supported_plugins/GPU.md) device instead of `clDNNPlugin` in previous versions. + * `openvino_auto_plugin` is used for [Auto-Device Plugin](../auto_device_selection.md) in 2022.1. +* The plugins for reading and converting models have been changed as below: + * `openvino_ir_frontend` is used to read IRs instead of `inference_engine_ir_reader` in previous versions. + * `openvino_onnx_frontend` is used to read ONNX models instead of `inference_engine_onnx_reader` (with its dependencies) in previous versions. + * `openvino_paddle_frontend` is added in 2022.1 to read PaddlePaddle models. 
+ + \ No newline at end of file diff --git a/docs/OV_Runtime_UG/migration_ov_2_0/graph_construction.md b/docs/OV_Runtime_UG/migration_ov_2_0/graph_construction.md new file mode 100644 index 00000000000..0ec5ec644ee --- /dev/null +++ b/docs/OV_Runtime_UG/migration_ov_2_0/graph_construction.md @@ -0,0 +1,16 @@ +# Model creation in runtime {#openvino_2_0_model_creation} + +OpenVINO™ Runtime API 2.0 includes nGraph engine as a common part. The `ngraph` namespace was changed to `ov`, all other ngraph API is preserved as is. +Code snippets below show how application code should be changed for migration to OpenVINO™ Runtime API 2.0. + +### nGraph API + +@snippet snippets/ngraph.cpp ngraph:graph + +### OpenVINO™ Runtime API 2.0: + +@snippet snippets/ov_graph.cpp ov:graph + +**See also:** +- [Hello Model Creation C++ Sample](../../../samples/cpp/model_creation_sample/README.md) +- [Hello Model Creation Python Sample](../../../samples/python/model_creation_sample/README.md) diff --git a/docs/OV_Runtime_UG/migration_ov_2_0/intro.md b/docs/OV_Runtime_UG/migration_ov_2_0/intro.md new file mode 100644 index 00000000000..b36c5588efb --- /dev/null +++ b/docs/OV_Runtime_UG/migration_ov_2_0/intro.md @@ -0,0 +1,87 @@ +# OpenVINO™ 2.0 Transition Guide {#openvino_2_0_transition_guide} + +@sphinxdirective + +.. toctree:: + :maxdepth: 1 + :hidden: + + openvino_2_0_deployment + openvino_2_0_inference_pipeline + openvino_2_0_configure_devices + openvino_2_0_preprocessing + openvino_2_0_model_creation + +@endsphinxdirective + +### Introduction + +Older versions of OpenVINO™ (prior to 2022.1) required to change the logic of applications when an user migrates from the frameworks like TensorFlow, ONNX Runtime, PyTorch, PaddlePaddle, etc. The change of application's logic is connected with: + +- Model Optimizer changed input precisions for some inputs. For example, neural language processing models with `I64` input are becoming to have `I32` input element type. 
+- Model Optimizer changed layouts for TensorFlow models (see [Layouts in OpenVINO](../layout_overview.md)). It leads to unexpected user behavior that a user needs to use a different layout for its input data with compare to the framework: +![tf_openvino] +- Inference Engine API (`InferenceEngine::CNNNetwork`) also applied some conversion rules for input and output precisions because of device plugins limitations. +- Users need to specify input shapes during model conversions in Model Optimizer and work with static shapes in the application. + +OpenVINO™ introduces API 2.0 to align logic of working with model as it is done in the frameworks - no layout and precision changes, operates with tensor names and indices to address inputs and outputs. OpenVINO Runtime is composed of Inference Engine API used for inference and nGraph API targeted to work with models, operations. The API 2.0 has common structure, naming convention styles, namespaces, removes duplicated structures. See [How to migrate to OpenVINO API 2.0](common_inference_pipeline.md) for details. + +> **NOTE**: Most important is that your existing application can continue working with OpenVINO Runtime 2022.1 as it used to be, but we recommend migration to API 2.0 to unlock additional features like [Preprocessing](../preprocessing_overview.md) and [Dynamic shapes support](../ov_dynamic_shapes.md). + +### Introducing IR v11 + +To support these features, OpenVINO introduced IR v11 which is generated by Model Optimizer by default since 2022.1. The model represented in IR v11 fully matches the original model in a original framework format in terms of inputs and outputs. Also, a user does not have to specify input shapes during the conversion, so the resulting IR v11 contains `-1` to denote undefined dimensions (see [Working with dynamic shapes](../ov_dynamic_shapes.md) to fully utilize this feature; or [Changing input shapes](../ShapeInference.md) to reshape to static shapes in the application). 
+ +What is also important to mention - the IR v11 is fully compatible with old applications written with Inference Engine API from older versions of OpenVINO. This is achieved by adding additional runtime information to the IR v11 which is responsible for backward compatible behavior. So, once the IR v11 is read by the old Inference Engine based application, it's internally converted to IR v10 to provide backward-compatible behavior. + +The IR v11 is supported by all OpenVINO Development tools including Post-Training Optimization tool, Benchmark app, etc. + +### IR v10 Compatibility + +OpenVINO API 2.0 also supports models in IR v10 for backward compatibility. So, if a user has an IR v10, it can be fed to OpenVINO Runtime as well (see [migration steps](common_inference_pipeline.md)). + +Some OpenVINO Development Tools also support both IR v10 and IR v11 as an input: +- Accuracy checker also supports IR v10, but requires an additional option to denote which API is used underneath. +- [Compile tool](../../../tools/compile_tool/README.md) compiles the model to be used in API 2.0 by default. If a user wants to use the resulting compiled blob in Inference Engine API, the additional `ov_api_1_0` option should be passed. + +The following OpenVINO tools don't support IR v10 as an input, and require to generate an IR v11 from the original model with the latest version of Model Optimizer: +- Post-Training Optimization tool +- Deep Learning Workbench + +> **NOTE**: If you need to quantize your IR v10 models to run with OpenVINO 2022.1, it's recommended to download and use Post-Training Optimization tool from OpenVINO 2021.4 release. + +### Differences between Inference Engine and OpenVINO Runtime 2022.1 + +### Differences between Inference Engine and OpenVINO Runtime 2.0 + +Inference Engine and nGraph APIs are not deprecated, they are fully functional and can be used in applications. 
However, it's highly recommended to migrate to API 2.0, because it already has additional features and this list will be extended later. The following list of additional features is supported by API 2.0: +- [Working with dynamic shapes](../ov_dynamic_shapes.md). The feature is quite useful for best performance for NLP (Neural Language Processing) models, super resolution models and other which accepts dynamic input shapes. +- [Preprocessing of the model](../preprocessing_overview.md) to add preprocessing operations to the inference models and fully occupy the accelerator and free CPU resources. + +To define a difference on the API level between Inference Engine and API 2.0, let's define two types of behaviors: +- **Old behavior** of OpenVINO supposes: + - Model Optimizer can change input element types, order of dimensions (layouts) with compare to the model from the original framework. + - Inference Engine can override input and output element types. + - Inference Engine API operates with operation names to address inputs and outputs (e.g. InferenceEngine::InferRequest::GetBlob). + - Does not support compiling of models with dynamic input shapes. +- **New behavior** assumes full model alignment with the framework and is implemented in OpenVINO 2022.1: + - Model Optimizer preserves the input element types, order of dimensions (layouts) and stores tensor names from the original models. + - OpenVINO Runtime 2022.1 reads models in any formats (IR v10, IR v11, ONNX, PaddlePaddle, etc) as is. + - API 2.0 operates with tensor names. Note, the difference between tensor names and operations names is that in case if a single operation has several output tensors, such tensors cannot identified in a unique manner, so tensor names are used for addressing as it's usually done in the frameworks. + - API 2.0 can address input and outputs tensors also by its index. Some model formats like ONNX are sensitive to order of inputs, outputs and its preserved by OpenVINO 2022.1. 
+ +The table below demonstrates which behavior **old** or **new** is used depending on a model source, used APIs. + +| API | IR v10 | IR v11 | ONNX file | Model created in code | +|-------------------------------|---------|---------|-----------|-----------------------| +|Inference Engine / nGraph APIs | Old | Old | Old | Old | +|API 2.0 | Old | New | New | New | + +Please look at next transition guides to understand how migrate Inference Engine-based application to API 2.0: + - [Installation & Deployment](deployment_migration.md) + - [OpenVINO™ Common Inference pipeline](common_inference_pipeline.md) + - [Preprocess your model](./preprocessing.md) + - [Configure device](./configure_devices.md) + - [OpenVINO™ Model Creation](graph_construction.md) + +[tf_openvino]: ../../img/tf_openvino.png diff --git a/docs/OV_Runtime_UG/migration_ov_2_0/preprocessing.md b/docs/OV_Runtime_UG/migration_ov_2_0/preprocessing.md new file mode 100644 index 00000000000..a860ac261f6 --- /dev/null +++ b/docs/OV_Runtime_UG/migration_ov_2_0/preprocessing.md @@ -0,0 +1,203 @@ +# Preprocessing {#openvino_2_0_preprocessing} + +### Introduction + +Inference Engine API has preprocessing capabilities in `InferenceEngine::CNNNetwork` class. Such preprocessing information is not a part of the main inference graph executed by the [OpenVINO devices](../supported_plugins/Device_Plugins.md), so it is stored and executed separately before an inference stage: +- Preprocessing operations are executed on CPU processor for most of the OpenVINO inference plugins. So, instead of occupying of acceleators, CPU processor is also busy with computational tasks. +- Preprocessing information stored in `InferenceEngine::CNNNetwork` is lost during saving back to IR file format. 
+ +OpenVINO Runtime API 2.0 introduces [new way of adding preprocessing operations to the model](../preprocessing_overview.md) - each preprocessing or postprocessing operation is integrated directly to the model and compiled together with inference graph: +- Add preprocessing operations first using `ov::preprocess::PrePostProcessor` +- Compile model on the target then using `ov::Core::compile_model` + +Having preprocessing operations as a part of OpenVINO opset allows to read and serialize preprocessed model as the IR file format. + +It's also important to mention that since OpenVINO 2.0, the Runtime API does not assume any default layouts like Inference Engine did, for example both `{ 1, 224, 224, 3 }` and `{ 1, 3, 224, 224 }` shapes are supposed to have `NCHW` layout while only the last shape has `NCHW`. So, some preprocessing capabilities in OpenVINO Runtime API 2.0 requires explicitly set layouts, see [Layout overview](../layout_overview.md) how to do it. For example, to perform image scaling by partial dimensions `H` and `W`, preprocessing needs to know what dimensions are `H` and `W`. + +> **NOTE**: Use Model Optimizer preprocessing capabilities to insert and optimize preprocessing operations to the model. In this case you don't need to read model in runtime application and set preprocessing, you can use [model caching feature](../Model_caching_overview.md) to improve time to inference stage. + +The steps below demonstrates how to migrate preprocessing scenarios from Inference Engine API to OpenVINO Runtime API 2.0. +The snippets suppose we need to preprocess a model input with tensor name `tensor_name`, in Inferenece Engine API using operation names to address the data, it's called `operation_name`. + +#### Importing preprocessing in Python + +In order to utilize preprocessing following imports must be added. + +Inference Engine API: + +@sphinxdirective + +.. 
doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [imports] + +@endsphinxdirective + +OpenVINO Runtime API 2.0: + +@sphinxdirective + +.. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [ov_imports] + +@endsphinxdirective + +There are two different namespaces `runtime`, which contains OpenVINO Runtime API classes and `preprocess` which provides Preprocessing API. + + +### Mean and scale values + +Inference Engine API: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.cpp + :language: cpp + :fragment: [mean_scale] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [mean_scale] + +@endsphinxdirective + +OpenVINO Runtime API 2.0: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.cpp + :language: cpp + :fragment: [ov_mean_scale] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [ov_mean_scale] + +@endsphinxdirective + +### Precision and layout conversions + +Inference Engine API: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.cpp + :language: cpp + :fragment: [conversions] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [conversions] + +@endsphinxdirective + +OpenVINO Runtime API 2.0: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.cpp + :language: cpp + :fragment: [ov_conversions] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [ov_conversions] + +@endsphinxdirective + +### Image scaling + +Inference Engine API: + +@sphinxdirective + +.. tab:: C++ + + .. 
doxygensnippet:: docs/snippets/ov_preprocessing_migration.cpp + :language: cpp + :fragment: [image_scale] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [image_scale] + +@endsphinxdirective + +OpenVINO Runtime API 2.0: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.cpp + :language: cpp + :fragment: [ov_image_scale] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [ov_image_scale] + +@endsphinxdirective + +### Color space conversions + +Inference Engine API: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.cpp + :language: cpp + :fragment: [color_space] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [color_space] + +@endsphinxdirective + +OpenVINO Runtime API 2.0: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.cpp + :language: cpp + :fragment: [ov_color_space] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing_migration.py + :language: python + :fragment: [ov_color_space] + +@endsphinxdirective + +**See also:** +- [Preprocessing details](../preprocessing_details.md) +- [NV12 classification sample](../../../samples/cpp/hello_nv12_input_classification/README.md) diff --git a/docs/OV_Runtime_UG/model_representation.md b/docs/OV_Runtime_UG/model_representation.md index 91202aeb8c8..ebf30b89ca1 100644 --- a/docs/OV_Runtime_UG/model_representation.md +++ b/docs/OV_Runtime_UG/model_representation.md @@ -2,8 +2,6 @@ In OpenVINO™ Runtime a model is represented by the `ov::Model` class. -## Model Representation - The `ov::Model` object stores shared pointers to `ov::op::v0::Parameter`, `ov::op::v0::Result` and `ov::op::Sink` operations that are inputs, outputs and sinks of the graph. 
Sinks of the graph have no consumers and are not included in the results vector. All other operations hold each other via shared pointers: child operation holds its parent (hard link). If an operation has no consumers and it's not the `Result` or `Sink` operation (shared pointer counter is zero), then it will be destructed and won't be accessible anymore. @@ -12,24 +10,63 @@ Each operation in `ov::Model` has the `std::shared_ptr` type. For details on how to build a model in OpenVINO™ Runtime, see the [Build a Model in OpenVINO™ Runtime](@ref build_model) section. +OpenVINO™ Runtime allows using tensor names or indexes to work wit model inputs/outputs. To get model input/output ports, use the `ov::Model::inputs()` or `ov::Model::outputs()` methods respectively. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_model_snippets.cpp + :language: cpp + :fragment: [all_inputs_ouputs] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_model_snippets.py + :language: python + :fragment: [all_inputs_ouputs] + +@endsphinxdirective + +OpenVINO™ Runtime model representation uses special classes to work with model data types and shapes. For data types the `ov::element::Type` is used. + +## Shapes Representation + +OpenVINO™ Runtime provides two types for shape representation: + +* `ov::Shape` - Represents static (fully defined) shapes. + +* `ov::PartialShape` - Represents dynamic shapes. That means that the rank or some of dimensions are dynamic (dimension defines an interval or undefined). `ov::PartialShape` can be converted to `ov::Shape` using the `get_shape()` method if all dimensions are static; otherwise the conversion raises an exception. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_model_snippets.cpp + :language: cpp + :fragment: [ov:partial_shape] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_model_snippets.py + :language: python + :fragment: [ov:partial_shape] + +@endsphinxdirective + + But in most cases before getting static shape using `get_shape()` method, you need to check that shape is static. + ## Operations -The `ov::Op` class represents any abstract operation in the model representation. Use this class to create [custom operations](../OV_Runtime_UG/Extensibility_DG/AddingNGraphOps.md). +The `ov::Op` class represents any abstract operation in the model representation. Use this class to create [custom operations](../Extensibility_UG/add_openvino_ops.md). ## Operation Sets Operation set (opset) is a collection of operations that can be used to construct a model. The `ov::OpSet` class provides a functionality to work with operation sets. -For each operation set, OpenVINO™ Runtime provides a separate namespace, for example `opset8`. -Each OpenVINO™ Release release introduces new operations and add these operations to a new operation set. New operation sets help to introduce a new version of operations that change behavior of previous operations. Using operation sets allows you to avoid changes in your application if new operations have been introduced. - -## Static and Partial Shapes - -There are two types used for shape representation: - -* `ov::Shape` - Represents static (fully defined) shapes. - -* `ov::PartialShape` - Represents dynamic shapes. That means that the rank or some of dimensions are dynamic (undefined). `ov::PartialShape` can be converted to `ov::Shape` using the `get_shape()` method if all dimensions are static; otherwise the conversion raises an exception. - +For each operation set, OpenVINO™ Runtime provides a separate namespace, for example `opset8`. +Each OpenVINO™ Release introduces new operations and add these operations to a new operation set. New operation sets help to introduce a new version of operations that change behavior of previous operations. 
Using operation sets allows you to avoid changes in your application if new operations have been introduced. +For a complete list of operation sets supported in OpenVINO™ toolkit, see [Available Operations Sets](../ops/opset.md). +To add support of custom operations, see the [Add Custom OpenVINO Operations](../Extensibility_UG/Intro.md) document. ## Build a Model in OpenVINO™ Runtime {#build_model} @@ -37,55 +74,109 @@ You can create a model from source. This section illustrates how to construct a Operation set `opsetX` integrates a list of pre-compiled operations that work for this purpose. In other words, `opsetX` defines a set of operations for building a graph. -For a complete list of operation sets supported in OpenVINO™ toolkit, see [Available Operations Sets](../ops/opset.md). - -To add suport of custom operations, see the [Add Custom OpenVINO Operations](../OV_Runtime_UG/Extensibility_DG/Intro.md) document. - To build an `ov::Model` instance from `opset8` operations, include the following files: - - C++ - @snippet example_ngraph_utils.cpp ov:include +@sphinxdirective - - Python - ```python - import openvino.runtime.opset8 as ov - from openvino.runtime import Model - ``` +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_model_snippets.cpp + :language: cpp + :fragment: [ov:include] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_model_snippets.py + :language: python + :fragment: [import] + +@endsphinxdirective The following code demonstrates how to create a simple model: - - C++ - @snippet example_ngraph_utils.cpp ov:create_simple_model +@sphinxdirective - - Python - ```python - TBD - ``` +.. tab:: C++ + .. doxygensnippet:: docs/snippets/ov_model_snippets.cpp + :language: cpp + :fragment: [ov:create_simple_model] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_model_snippets.py + :language: python + :fragment: [ov:create_simple_model] + +@endsphinxdirective The following code creates a model with several outputs: - - C++ - @snippet example_ngraph_utils.cpp ov:create_advanced_model +@sphinxdirective - - Python - ```python - TBD - ``` +.. tab:: C++ -## FAQ + .. doxygensnippet:: docs/snippets/ov_model_snippets.cpp + :language: cpp + :fragment: [ov:create_advanced_model] -### Does OpenVINO™ Runtime provide any capabilities to debug the model structure and model modification? - - To receive additional messages about applied graph modifications, rebuild the OpenVINO™ Runtime library with the `-DENABLE_OPENVINO_DEBUG=ON` option. - - A model can be visualized to image from the xDot format: - @snippet example_ngraph_utils.cpp ov:visualize - - A model can be serialized to IR: - @snippet example_ngraph_utils.cpp ov:serialize +.. tab:: Python -### How can I develop my own transformation pass? - See the [Transformations Developer Guide](./nGraphTransformation.md). + .. doxygensnippet:: docs/snippets/ov_model_snippets.py + :language: python + :fragment: [ov:create_advanced_model] + +@endsphinxdirective + +## Model debug capabilities + +OpenVINO™ provides several debug capabilities: + - To receive additional messages about applied model modifications, rebuild the OpenVINO™ Runtime library with the `-DENABLE_OPENVINO_DEBUG=ON` option. + - Model can be visualized to image from the xDot format: + @sphinxdirective + + .. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_model_snippets.cpp + :language: cpp + :fragment: [ov:visualize] + + .. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_model_snippets.py + :language: python + :fragment: [ov:visualize] + + @endsphinxdirective + + `ov::pass::VisualizeTree` can be parametrized via environment variables: + + OV_VISUALIZE_TREE_OUTPUT_SHAPES=1 - visualize shapes + OV_VISUALIZE_TREE_OUTPUT_TYPES=1 - visualize types + OV_VISUALIZE_TREE_MIN_MAX_DENORMAL=1 - pretty denormal values + OV_VISUALIZE_TREE_RUNTIME_INFO=1 - print runtime information + OV_VISUALIZE_TREE_IO=1 - print I/O ports + OV_VISUALIZE_TREE_MEMBERS_NAME=1 - print member names + + - Also model can be serialized to IR: + @sphinxdirective + + .. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_model_snippets.cpp + :language: cpp + :fragment: [ov:serialize] + + .. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_model_snippets.py + :language: python + :fragment: [ov:serialize] + + @endsphinxdirective ## See Also * [Available Operation Sets](../ops/opset.md) -* [OpenVINO™ Runtime Extensibility Developer Guide](../OV_Runtime_UG/Extensibility_DG/Intro.md) +* [OpenVINO™ Runtime Extensibility Developer Guide](../Extensibility_UG/Intro.md) +* [Transformations Developer Guide](../Extensibility_UG/ov_transformations.md). 
diff --git a/docs/OV_Runtime_UG/supported_plugins/MULTI.md b/docs/OV_Runtime_UG/multi_device.md similarity index 58% rename from docs/OV_Runtime_UG/supported_plugins/MULTI.md rename to docs/OV_Runtime_UG/multi_device.md index ff8c73a8131..f858d1b6906 100644 --- a/docs/OV_Runtime_UG/supported_plugins/MULTI.md +++ b/docs/OV_Runtime_UG/multi_device.md @@ -1,4 +1,4 @@ -# Multi-Device Plugin {#openvino_docs_IE_DG_supported_plugins_MULTI} +# Running on multiple devices simultaneously {#openvino_docs_OV_UG_Running_on_multiple_devices} ## Introducing the Multi-Device Plugin (C++) @@ -13,13 +13,13 @@ The Multi-Device plugin automatically assigns inference requests to available co * Improved throughput from using multiple devices (compared to single-device execution) * More consistent performance, since the devices share the inference burden (if one device is too busy, another can take more of the load) -Note that with Multi-Device the application logic is left unchanged, so you don't need to explicitly load the network to every device, create and balance the inference requests and so on. From the application point of view, this is just another device that handles the actual machinery. The only thing that is required to leverage performance is to provide the multi-device (and hence the underlying devices) with enough inference requests to process. For example, if you were processing 4 cameras on the CPU (with 4 inference requests), it might be desirable to process more cameras (with more requests in flight) to keep CPU and GPU busy via Multi-Device. +Note that with Multi-Device the application logic is left unchanged, so you don't need to explicitly compile the model on every device, create and balance the inference requests and so on. From the application point of view, this is just another device that handles the actual machinery. 
The only thing that is required to leverage performance is to provide the multi-device (and hence the underlying devices) with enough inference requests to process. For example, if you were processing 4 cameras on the CPU (with 4 inference requests), it might be desirable to process more cameras (with more requests in flight) to keep CPU and GPU busy via Multi-Device. The setup of Multi-Device can be described in three major steps: -1. Configure each device as usual. -2. Load the network to the Multi-Device plugin created on top of a (prioritized) list of the configured devices. This is the only change needed in the application. -3. As with any other ExecutableNetwork call (resulting from `InferenceEngine::Core::LoadNetwork`), you create as many requests as needed to saturate the devices. +1. Prepare configure for each device. +2. Compile the model on the Multi-Device plugin created on top of a (prioritized) list of the configured devices with the configure prepared in step one. +3. As with any other CompiledModel call (resulting from `compile_model`), you create as many requests as needed to saturate the devices. These steps are covered below in detail. @@ -29,22 +29,38 @@ Following the OpenVINO™ convention of labeling devices, the Multi-Device plugi | Parameter name | Parameter values | Default | Description | | -------------- | ---------------- | --- | --- | -| "MULTI_DEVICE_PRIORITIES" | comma-separated device names with no spaces | N/A | Prioritized list of devices | +| ov::device::priorities | comma-separated device names with no spaces | N/A | Prioritized list of devices | + +You can set the priorities directly as a string. -You can set the configuration directly as a string, or use the metric key `MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES from the `multi/multi_device_config.hpp` file, which defines the same string. 
- Basically, there are three ways to specify the devices to be use by the "MULTI": -@snippet snippets/MULTI0.cpp part0 +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/MULTI0.cpp + :language: cpp + :fragment: [part0] + +@endsphinxdirective Notice that the priorities of the devices can be changed in real time for the executable network: -@snippet snippets/MULTI1.cpp part1 +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/MULTI1.cpp + :language: cpp + :fragment: [part1] + +@endsphinxdirective Finally, there is a way to specify number of requests that the Multi-Device will internally keep for each device. Suppose your original app was running 4 cameras with 4 inference requests. You would probably want to share these 4 requests between 2 devices used in MULTI. The easiest way is to specify a number of requests for each device using parentheses: "MULTI:CPU(2),GPU(2)" and use the same 4 requests in your app. However, such an explicit configuration is not performance-portable and hence not recommended. Instead, the better way is to configure the individual devices and query the resulting number of requests to be used at the application level (see [Configuring the Individual Devices and Creating the Multi-Device On Top](#configuring-the-individual-devices-and-creating-the-multi-device-on-top)). ### Enumerating Available Devices -The Inference Engine features a dedicated API to enumerate devices and their capabilities. See the [Hello Query Device C++ Sample](../../../samples/cpp/hello_query_device/README.md). This is example output from the sample (truncated to device names only): +The OpenVINO Runtime API features a dedicated methods to enumerate devices and their capabilities. See the [Hello Query Device C++ Sample](../../samples/cpp/hello_query_device/README.md). 
This is example output from the sample (truncated to device names only): ```sh ./hello_query_device @@ -60,7 +76,15 @@ The Inference Engine features a dedicated API to enumerate devices and their cap A simple programmatic way to enumerate the devices and use with the multi-device is as follows: -@snippet snippets/MULTI2.cpp part2 +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/MULTI2.cpp + :language: cpp + :fragment: [part2] + +@endsphinxdirective Beyond the trivial "CPU", "GPU", "HDDL" and so on, when multiple instances of a device are available the names are more qualified. For example, this is how two Intel® Movidius™ Myriad™ X sticks are listed with the hello_query_sample: ``` @@ -72,13 +96,28 @@ Beyond the trivial "CPU", "GPU", "HDDL" and so on, when multiple instances of a So the explicit configuration to use both would be "MULTI:MYRIAD.1.2-ma2480,MYRIAD.1.4-ma2480". Accordingly, the code that loops over all available devices of "MYRIAD" type only is below: -@snippet snippets/MULTI3.cpp part3 +@sphinxdirective +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/MULTI3.cpp + :language: cpp + :fragment: [part3] + +@endsphinxdirective ### Configuring the Individual Devices and Creating the Multi-Device On Top As discussed in the first section, you shall configure each individual device as usual and then just create the "MULTI" device on top: -@snippet snippets/MULTI4.cpp part4 +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/MULTI4.cpp + :language: cpp + :fragment: [part4] + +@endsphinxdirective An alternative is to combine all the individual device settings into a single config file and load that, allowing the Multi-Device plugin to parse and apply settings to the right devices. See the code example in the next section. 
@@ -86,19 +125,27 @@ Note that while the performance of accelerators combines really well with Multi- See the [Using the Multi-Device with OpenVINO samples and benchmarking the performance](#using-the-multi-device-with-openvino-samples-and-benchmarking-the-performance) section below. ### Querying the Optimal Number of Inference Requests -You can use the new GetMetric API to query the optimal number of requests. Similarly, when using the Multi-Device you don't need to sum over included devices yourself, you can query metric directly: +You can use the [configure devices](supported_plugins/config_properties.md) to query the optimal number of requests. Similarly, when using the Multi-Device you don't need to sum over included devices yourself, you can query property directly: -@snippet snippets/MULTI5.cpp part5 +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/MULTI5.cpp + :language: cpp + :fragment: [part5] + +@endsphinxdirective ### Using the Multi-Device with OpenVINO Samples and Benchmarking the Performance -Every OpenVINO sample that supports the `-d` (which stands for "device") command-line option transparently accepts Multi-Device. The [Benchmark Application](../../../samples/cpp/benchmark_app/README.md) is the best reference for the optimal usage of Multi-Device. As discussed earlier, you do not need to set up the number of requests, CPU streams or threads because the application provides optimal performance out of the box. Below is an example command to evaluate HDDL+GPU performance with that: +Every OpenVINO sample that supports the `-d` (which stands for "device") command-line option transparently accepts Multi-Device. The [Benchmark Application](../../samples/cpp/benchmark_app/README.md) is the best reference for the optimal usage of Multi-Device. As discussed earlier, you do not need to set up the number of requests, CPU streams or threads because the application provides optimal performance out of the box. 
Below is an example command to evaluate HDDL+GPU performance with that: ```sh ./benchmark_app –d MULTI:HDDL,GPU –m -i -niter 1000 ``` -The Multi-Device plugin supports FP16 IR files. The CPU plugin automatically upconverts it to FP32 and the other devices support it natively. Note that no demos are (yet) fully optimized for Multi-Device, by means of supporting the OPTIMAL_NUMBER_OF_INFER_REQUESTS metric, using the GPU streams/throttling, and so on. +The Multi-Device plugin supports FP16 IR files. The CPU plugin automatically upconverts it to FP32 and the other devices support it natively. Note that no demos are (yet) fully optimized for Multi-Device, by means of supporting the ov::optimal_number_of_infer_requests property, using the GPU streams/throttling, and so on. ### Video: MULTI Plugin @@ -110,7 +157,22 @@ The Multi-Device plugin supports FP16 IR files. The CPU plugin automatically upc @endsphinxdirective ### See Also -[Supported Devices](Supported_Devices.md) +[Supported Devices](supported_plugins/Supported_Devices.md) + +## Performance Considerations for the Multi-Device Execution +This section covers few recommendations for the multi-device execution (applicable for both Python and C++): +- MULTI usually performs best when the fastest device is specified first in the list of the devices. + This is particularly important when the request-level parallelism is not sufficient + (e.g. the number of request in the flight is not enough to saturate all devices). +- Just like with any throughput-oriented execution, it is highly recommended to query the optimal number of inference requests directly from the instance of the `ov:compiled_model`. +Please refer to the code of the `benchmark_app`, that exists in both [C++](../../samples/cpp/benchmark_app/README.md) and [Python](../../tools/benchmark_tool/README.md), for more details. 
+- Notice that for example CPU+GPU execution performs better with certain knobs + which you can find in the code of the same [Benchmark App](../../samples/cpp/benchmark_app/README.md) sample. + One specific example is disabling GPU driver polling, which in turn requires multiple GPU streams to amortize slower + communication of inference completion from the device to the host. +- Multi-device logic always attempts to save on the (e.g. inputs) data copies between device-agnostic, user-facing inference requests + and device-specific 'worker' requests that are being actually scheduled behind the scene. + To facilitate the copy savings, it is recommended to run the requests in the order that they were created. ## Introducing the Multi-Device Plugin (Python) @@ -125,13 +187,13 @@ The Multi-Device plugin automatically assigns inference requests to available co * Improved throughput from using multiple devices (compared to single-device execution) * More consistent performance, since the devices share the inference burden (if one device is too busy, another can take more of the load) -Note that with Multi-Device the application logic is left unchanged, so you don't need to explicitly load the network to every device, create and balance the inference requests and so on. From the application point of view, this is just another device that handles the actual machinery. The only thing that is required to leverage performance is to provide the multi-device (and hence the underlying devices) with enough inference requests to process. For example, if you were processing 4 cameras on the CPU (with 4 inference requests), it might be desirable to process more cameras (with more requests in flight) to keep CPU and GPU busy via Multi-Device. +Note that with Multi-Device the application logic is left unchanged, so you don't need to explicitly compile the model on every device, create and balance the inference requests and so on. 
From the application point of view, this is just another device that handles the actual machinery. The only thing that is required to leverage performance is to provide the multi-device (and hence the underlying devices) with enough inference requests to process. For example, if you were processing 4 cameras on the CPU (with 4 inference requests), it might be desirable to process more cameras (with more requests in flight) to keep CPU and GPU busy via Multi-Device. The setup of Multi-Device can be described in three major steps: -1. Configure each device as usual (using the conventional [ie_api.IECore.set_config](api/ie_python_api/_autosummary/openvino.inference_engine.IECore.html#openvino.inference_engine.IECore.set_config) method -2. Load the network to the Multi-Device plugin created on top of a (prioritized) list of the configured devices. This is the only change needed in the application. -3. As with any other ExecutableNetwork call (resulting from `load_network`), you create as many requests as needed to saturate the devices. +1. Configure each device (using the conventional [configure devices](supported_plugins/config_properties.md) method +2. Compile the model on the Multi-Device plugin created on top of a (prioritized) list of the configured devices. This is the only change needed in the application. +3. As with any other CompiledModel call (resulting from `compile_model`), you create as many requests as needed to saturate the devices. These steps are covered below in detail. 
@@ -148,41 +210,36 @@ You can set the configuration directly as a string, or use the metric key `MULTI #### The Three Ways to Specify Devices Targets for the MULTI plugin * Option 1 - Pass a Prioritized List as a Parameter in ie.load_network() - ```python - from openvino.inference_engine import IECore - ie = IECore() - # Read a network in IR or ONNX format - net = ie.read_network(model=path_to_model) - exec_net = ie.load_network(network=net, device_name="MULTI:CPU,GPU") - ``` +@sphinxdirective + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_multi.py + :language: python + :fragment: [Option_1] + +@endsphinxdirective * Option 2 - Pass a List as a Parameter, and Dynamically Change Priorities during Execution Notice that the priorities of the devices can be changed in real time for the executable network: - ```python - from openvino.inference_engine import IECore - # Init the Inference Engine Core - ie = IECore() +@sphinxdirective - # Read a network in IR or ONNX format - net = ie.read_network(model=path_to_model) +.. tab:: Python - ie.set_config( config={"MULTI_DEVICE_PRIORITIES":"HDDL,GPU"}, device_name="MULTI") + .. doxygensnippet:: docs/snippets/ov_multi.py + :language: python + :fragment: [Option_2] - # Change priorities - ie.set_config( config={"MULTI_DEVICE_PRIORITIES":"GPU,HDDL"}, device_name="MULTI") - ie.set_config( config={"MULTI_DEVICE_PRIORITIES":"GPU"}, device_name="MULTI") - ie.set_config( config={"MULTI_DEVICE_PRIORITIES":"HDDL,GPU"}, device_name="MULTI") - ie.set_config( config={"MULTI_DEVICE_PRIORITIES":"CPU,HDDL,GPU"}, device_name="MULTI") - ``` +@endsphinxdirective * Option 3 - Use Explicit Hints for Controlling Request Numbers Executed by Devices There is a way to specify the number of requests that Multi-Device will internally keep for each device. If the original app was running 4 cameras with 4 inference requests, it might be best to share these 4 requests between 2 devices used in the MULTI. 
The easiest way is to specify a number of requests for each device using parentheses: “MULTI:CPU(2),GPU(2)” and use the same 4 requests in the app. However, such an explicit configuration is not performance-portable and not recommended. The better way is to configure the individual devices and query the resulting number of requests to be used at the application level. See [Configuring the Individual Devices and Creating the Multi-Device On Top](#configuring-the-individual-devices-and-creating-the-multi-device-on-top). ### Enumerating Available Devices -The Inference Engine features a dedicated API to enumerate devices and their capabilities. See the [Hello Query Device Python Sample](../../../samples/python/hello_query_device/README.md). This is example output from the sample (truncated to device names only): +The OpenVINO Runtime API features dedicated methods to enumerate devices and their capabilities. See the [Hello Query Device Python Sample](../../samples/python/hello_query_device/README.md). This is example output from the sample (truncated to device names only): ```sh ./hello_query_device @@ -198,16 +255,15 @@ The Inference Engine features a dedicated API to enumerate devices and their cap A simple programmatic way to enumerate the devices and use with the multi-device is as follows: -```python +@sphinxdirective -from openvino.inference_engine import IECore +.. tab:: Python -all_devices = "MULTI:" -ie = IECore() -net = ie.read_network(model=path_to_model) -all_devices += ",".join(ie.available_devices) -exec_net = ie.load_network(network=net, device_name=all_devices) -``` + .. doxygensnippet:: docs/snippets/ov_multi.py + :language: python + :fragment: [available_devices_1] + +@endsphinxdirective Beyond the trivial "CPU", "GPU", "HDDL" and so on, when multiple instances of a device are available the names are more qualified. 
For example, this is how two Intel® Movidius™ Myriad™ X sticks are listed with the hello_query_sample: @@ -220,46 +276,29 @@ Beyond the trivial "CPU", "GPU", "HDDL" and so on, when multiple instances of a So the explicit configuration to use both would be "MULTI:MYRIAD.1.2-ma2480,MYRIAD.1.4-ma2480". Accordingly, the code that loops over all available devices of "MYRIAD" type only is below: -```python -from openvino.inference_engine import IECore +@sphinxdirective -ie = IECore() -match_list = [] -all_devices = "MULTI:" -dev_match_str = "MYRIAD" -net = ie.read_network(model=path_to_model) +.. tab:: Python -for d in ie.available_devices: - if dev_match_str in d: - match_list.append(d) + .. doxygensnippet:: docs/snippets/ov_multi.py + :language: python + :fragment: [available_devices_2] -all_devices += ",".join(match_list) -exec_net = ie.load_network(network=net, device_name=all_devices) -``` +@endsphinxdirective ### Configuring the Individual Devices and Creating the Multi-Device On Top It is possible to configure each individual device as usual and then create the "MULTI" device on top: -```python -from openvino.inference_engine import IECore +@sphinxdirective -ie = IECore() -net = ie.read_network(model=path_to_model) +.. tab:: Python -cpu_config = {} -gpu_config = {} + .. doxygensnippet:: docs/snippets/ov_multi.py + :language: python + :fragment: [set_property] -ie.set_config(config=cpu_config, device_name="CPU") -ie.set_config(config=gpu_config, device_name="GPU") - -# Load the network to the multi-device, specifying the priorities -exec_net = ie.load_network( - network=net, device_name="MULTI", config={"MULTI_DEVICE_PRIORITIES": "CPU,GPU"} -) -# Query the optimal number of requests -nireq = exec_net.get_metric("OPTIMAL_NUMBER_OF_INFER_REQUESTS") -``` +@endsphinxdirective An alternative is to combine all the individual device settings into a single config file and load that, allowing the Multi-Device plugin to parse and apply settings to the right devices. 
See the code example in the next section. @@ -268,7 +307,7 @@ Note that while the performance of accelerators works well with Multi-Device, th ### Using the Multi-Device with OpenVINO Samples and Benchmarking the Performance -Every OpenVINO sample that supports the `-d` (which stands for "device") command-line option transparently accepts Multi-Device. The [Benchmark application](../../../tools/benchmark_tool/README.md) is the best reference for the optimal usage of Multi-Device. As discussed earlier, you do not need to set up the number of requests, CPU streams or threads because the application provides optimal performance out of the box. Below is an example command to evaluate CPU+GPU performance with the Benchmark application: +Every OpenVINO sample that supports the `-d` (which stands for "device") command-line option transparently accepts Multi-Device. The [Benchmark application](../../tools/benchmark_tool/README.md) is the best reference for the optimal usage of Multi-Device. As discussed earlier, you do not need to set up the number of requests, CPU streams or threads because the application provides optimal performance out of the box. Below is an example command to evaluate CPU+GPU performance with the Benchmark application: ```sh ./benchmark_app.py –d MULTI:CPU,GPU –m @@ -276,7 +315,7 @@ Every OpenVINO sample that supports the `-d` (which stands for "device") command > **NOTE**: If you installed OpenVINO with pip, use `benchmark_app -d MULTI:CPU,GPU -m ` -The Multi-Device plugin supports FP16 IR files. The CPU plugin automatically upconverts it to FP32 and the other devices support it natively. Note that no demos are (yet) fully optimized for Multi-Device, by means of supporting the OPTIMAL_NUMBER_OF_INFER_REQUESTS metric, using the GPU streams/throttling, and so on. +The Multi-Device plugin supports FP16 IR files. The CPU plugin automatically upconverts it to FP32 and the other devices support it natively. 
Note that no demos are (yet) fully optimized for Multi-Device, by means of supporting the ov::optimal_number_of_infer_requests property, using the GPU streams/throttling, and so on. ### Video: MULTI Plugin > **NOTE**: This video is currently available only for C++, but many of the same concepts apply to Python. @@ -289,4 +328,4 @@ The Multi-Device plugin supports FP16 IR files. The CPU plugin automatically upc @endsphinxdirective ### See Also -[Supported Devices](Supported_Devices.md) \ No newline at end of file +[Supported Devices](supported_plugins/Supported_Devices.md) diff --git a/docs/OV_Runtime_UG/nGraphTransformation.md b/docs/OV_Runtime_UG/nGraphTransformation.md deleted file mode 100644 index 39fb3606e5a..00000000000 --- a/docs/OV_Runtime_UG/nGraphTransformation.md +++ /dev/null @@ -1,449 +0,0 @@ -# Overview of Transformations API {#ngraph_transformation} - -This guide contains all necessary information that you need to start implementing nGraph transformations. - -## Prerequisites -Before creating a transformation, do the following: - -* Make sure that there is no transformation with the same functionality in the [Transformation Library](group__ie__transformation__api.html) -* Learn how the [Transformation Library](group__ie__transformation__api.html) is structured and how transformations are organized -* Understand where to put your transformation code - -### Transformation Library Structure -OpenVINO transformations are located in the `src/common/transformations` directory. - -Transformations root directory contains two folders: -* `ngraph_ops` - Contains internal opset operations that are common for plugins. -* `transformations` - Includes all transformations, utils, runtime info attributes, and pass managers. - -All internal operations and transformations located inside the [Transformation Library](group__ie__transformation__api.html) can be used inside plugins. 
-All legacy operations and transformations were moved to a legacy library and are not recommended to be used. - -### Transformation Flow Layers -Transformation flow in the transformation library has several layers: - -1. Pass managers - Execute any type of transformations and provide additional debug capabilities. -2. Transformations - Perform a particular transformation algorithm on `ngraph::Function`. -3. Low-level functions - Take a set of nodes and perform some transformation action. -They are not mandatory and all transformation code can be located inside the transformation. -But if some transformation parts can potentially be reused in other transformations, we suggest keeping them as separate functions. - -### Location for Your Transformation Code -To decide where to store your transformation code, please follow these rules: - -1. If it is a plugin-specific transformation and cannot be reused by other plugins, keep source code inside plugin. -2. If this transformation relates to opset operation conversion or optimization, keep sources inside the transformation library. - -After you decide where to store your transformation code, you can start developing your own nGraph transformation. - -## ngraph::Function and graph representation - -nGraph function is a very simple thing: it stores shared pointers to `ngraph::op::Parameter`, `ngraph::op::Result` and `ngraph::op::Sink` operations that are inputs, outputs and sinks of the graph. -Sinks of the graph have no consumers and not included into results vector. All other operations hold each other via shared pointers: child operation holds its parent (hard link). If operation has no consumers and it's not Result or Sink operation -(shared pointer counter is zero) then it will be destructed and won't be accessible anymore. Each operation in `ngraph::Function` has a `std::shared_ptr` type. - -For examples of how to build an nGraph function, see the [Build nGraph Function](./model_representation.md) page. 
- -## Transformations types - -nGraph has three main transformation types: - -* `ngraph::pass::FunctionPass` - straightforward way to work with `ngraph::Function` directly -* `ngraph::pass::MatcherPass` - pattern-based transformation approach -* `ngraph::pass::GraphRewrite` - container for matcher passes needed for efficient execution - -![transformations_structure] - -### ngraph::pass::FunctionPass - -`ngraph::pass::FunctionPass` is used for transformations that take entire `ngraph::Function` as an input and process it. - -Template for FunctionPass transformation class - -@snippet src/transformations/template_function_transformation.hpp function_pass:template_transformation_hpp - -@snippet src/transformations/template_function_transformation.cpp function_pass:template_transformation_cpp - -Using `ngraph::FunctionPass`, you need to override the `run_on_function` method where you will write the transformation code. -Return value is `true` if the original function has changed during transformation (new operation was added, or operations replacement was made, or node attributes were changed); otherwise, it is `false`. -For transformation API, please follow the [working with ngraph::Function](#working_with_ngraph_function) section. -Also `ngraph::FunctionPass` based transformations can be executed via `pass::Manager`. See the examples in the [Using pass manager](#using_pass_manager) section. - -### ngraph::pass::MatcherPass - -`ngraph::pass::MatcherPass` is used for pattern-based transformations. - -Template for MatcherPass transformation class -@snippet src/transformations/template_pattern_transformation.hpp graph_rewrite:template_transformation_hpp - -@snippet src/transformations/template_pattern_transformation.cpp graph_rewrite:template_transformation_cpp - -To use `ngraph::pass::MatcherPass`, you need to complete these steps: -1. Create a pattern -2. Implement a callback -3. Register the pattern and Matcher -4. 
Execute MatcherPass - -So let's go through each of these steps. - -### Create a pattern -Pattern is a single root `ngraph::Function`. But the only difference is that you do not need to create a function object, you just need to create and connect opset or special pattern operations. -Then you need to take the last created operation and put it as a root of the pattern. This root node will be used as a root node in pattern matching. -> **NOTE**: Any nodes in a pattern that have no consumers and are not registered as root will not be used in pattern matching. - -@snippet example_ngraph_utils.cpp pattern:simple_example - -The `Parameter` operation in the example above has type and shape specified. These attributes are needed only to create Parameter operation class and will not be used in pattern matching. - -For more pattern examples, refer to the [pattern matching](#pattern_matching) section. - -### Implement callback -Callback is an action applied to every pattern entrance. In general, callback is the lambda function that takes Matcher object with detected subgraph. - -@snippet example_ngraph_utils.cpp pattern:callback_example - -The example above shows the callback structure and how Matcher can be used for accessing nodes detected by pattern. -Callback return value is `true` if root node was replaced and another pattern cannot be applied to the same root node; otherwise, it is `false`. -> **NOTE**: It is not recommended to manipulate with nodes that are under root node. This may affect GraphRewrite execution as it is expected that all nodes that come after root node in topological order are valid and can be used in pattern matching. - -MatcherPass also provides functionality that allows reporting of the newly created nodes that can be used in additional pattern matching. -If MatcherPass was registered in `pass::Manager` or `pass::GraphRewrite`, these registered nodes will be added for additional pattern matching. 
-That means that matcher passes registered in `pass::GraphRewrite` will be applied to these nodes. - -The example below shows how single MatcherPass can fuse sequence of operations using the `register_new_node` method. - -@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:relu_fusion - -> **NOTE**: If you register multiple nodes, please add them in topological order. We do not topologically sort these nodes as it is a time-consuming operation. - -### Register pattern and Matcher -The last step is to register Matcher and callback inside the MatcherPass pass. To do this, call the `register_matcher` method. -> **NOTE**: Only one matcher can be registered for a single MatcherPass class. - -```cpp -// Register matcher and callback -register_matcher(m, callback); -``` -### Execute MatcherPass -MatcherPass has multiple ways to be executed: -* Run on a single node - it can be useful if you want to run MatcherPass inside another transformation. -@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:run_on_node -* Run on `ngraph::Function` using GraphRewrite - this approach gives ability to run MatcherPass on whole `ngraph::Function`. Moreover, multiple MatcherPass transformation can be registered in a single GraphRewite to be executed in a single graph traversal. -@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:graph_rewrite -* Run on `ngraph::Function` using `pass::Manager` - this approach helps you to register MatcherPass for execution on `ngraph::Function` as another transformation types. -@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:manager - - -### ngraph::pass::GraphRewrite - -GraphRewrite pass serves for running multiple matcher passes on `ngraph::Function` in a single graph traversal. 
-Example: - -@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:graph_rewrite - -In addition, GraphRewrite handles nodes that were registered by MatcherPasses during their execution. This nodes will be added to the beginning of the sequence with nodes for pattern matching. - -> **NOTE**: when using `pass::Manager` temporary GraphRewrite is used to execute single MatcherPass. - -GraphRewrite has two algorithms for MatcherPasses execution. First algorithm is straightforward. It applies each MatcherPass in registration order to current node. - -![graph_rewrite_execution] - -But it is not really efficient when you have a lot of registered passes. So first of all GraphRewrite checks that all MatcherPass patterns has type-based root node (it means that type of this node is not hidden into predicate). -And then creates map from registered MatcherPasses. That helps to avoid additional cost of applying each MatcherPass for each node. - -![graph_rewrite_efficient_search] - -> **NOTE**: GraphRewrite execution algorithm cannot be set manually and depends only on root nodes registered inside MatcherPasses. - -## Pattern Matching - -Sometimes patterns cannot be expressed via regular nGraph operations or it is too complicated. -For example, if you want to detect Convolution->Add sub-graph without specifying particular input type for Convolution operation or you want to create a pattern where some of operations can have different types. -And for these cases nGraph provides additional helpers to construct patterns for GraphRewrite transformations. - -There are two main helpers: -1. `ngraph::pattern::any_input` - helps to express inputs if their types are undefined. -2. `ngraph::pattern::wrap_type` - helps to express nodes of pattern without specifying node attributes. 
- -Let's go through the example to have better understanding of how it works: - -> **NOTE**: Node attributes do not participate in pattern matching and are needed only for operations creation. Only operation types participate in pattern matching. - -The example below shows basic usage of `pattern::any_input`. -Here we construct Multiply pattern with arbitrary first input and Constant as a second input. -Also as Multiply is commutative operation, it does not matter in which order we set inputs (any_input/Constant or Constant/any_input) because both cases will be matched. - -@snippet example_ngraph_utils.cpp pattern:label_example - -This example shows how we can construct a pattern when operation has arbitrary number of inputs. - -@snippet example_ngraph_utils.cpp pattern:concat_example - -This example shows how to use predicate to construct a pattern. Also it shows how to match pattern manually on given node. - -@snippet example_ngraph_utils.cpp pattern:predicate_example - -> **NOTE**: Be careful with manual matching because Matcher object holds matched nodes. To clear a match, use the m->clear_state() method. - -## Working with ngraph::Function - -In this chapter we will review nGraph API that allows us to manipulate with `ngraph::Function`. - -### ngraph::Node input and output ports - -First of all let's talk about `ngraph::Node` input/output ports. Each nGraph operation has input and output ports except cases when operation has `Result`, `Parameter`, or `Constant` type. - -Every port belongs to its node, so using a port we can access parent node, get shape and type for particular input/output, get all consumers in case of output port, and get producer node in case of input port. -With output port we can set inputs for newly created operations. - -Lets look at the code example. 
- -@snippet example_ngraph_utils.cpp ngraph:ports_example - -You may notice that we usually construct operations in this way: -```cpp -std::shared_ptr neg_const = opset1::Constant::create(sub->get_input_element_type(1), Shape{1}, {-1})); -Output data = node->input_value(0); -auto neg = std::make_shared(data, neg_const); -``` -In this example, the `opset3::Multiply` operation takes `Output` and `std::shared_ptr` as inputs. But the constructor takes both as `Output`. -In this case, `std::shared_ptr` will be automatically converted to `Output` if node has exactly one output port; otherwise, conversion raises an exception. - -### ngraph::Node replacement - -nGraph provides two ways for node replacement: via nGraph helper function and directly via port methods. We are going to review both of them. - -Let's start with nGraph helper functions. The most popular function is `ngraph::replace_node(old_node, new_node)`. - -We will review real replacement case where Negative operation is replaced with Multiply. - -![ngraph_replace_node] - -@snippet example_ngraph_utils.cpp ngraph:replace_node - -`ngraph::replace_node` has a constraint that number of output ports for both of ops must be the same; otherwise, it raises an exception. - - -The alternative way to do the same replacement is the following: -```cpp -// All neg->output(0) consumers will be moved to mul->output(0) port -neg->output(0).replace(mul->output(0)); -``` - -Another transformation example is insertion. - -![ngraph_insert_node] - -@snippet example_ngraph_utils.cpp ngraph:insert_node - -The alternative way to the insert operation is to make a node copy and use `replace_node`: - -@snippet example_ngraph_utils.cpp ngraph:insert_node_with_copy - -### ngraph::Node elimination - -Another type of node replacement is its elimination. - -To eliminate operation, nGraph has special method that considers all limitations related to InferenceEngine. 
- -@snippet example_ngraph_utils.cpp ngraph:eliminate_node - -`replace_output_update_name` in case of successful replacement it automatically preserves friendly name and runtime info. - - -## Transformation conditional compilation - -Transformation library has two internal macros to support conditional compilation feature. - -* `MATCHER_SCOPE(region)` - allows to disable the MatcherPass if matcher isn't used. The region name should be unique. This macro creates a local variable `matcher_name` which you should use as a matcher name. -* `RUN_ON_FUNCTION_SCOPE(region)` - allows to disable run_on_function pass if it isn't used. The region name should be unique. - -## Transformation writing essentials - -When developing a transformation, you need to follow these transformation rules: - -###1. Operation Set (OpSet) - -Use the latest version of OpSet in your transformation. An exception is op_conversion transformations, where different opsets can be used. - -@snippet example_ngraph_utils.cpp ov:include - -###2. Dynamic Shape and Rank - -nGraph has two types for shape representation: -`ngraph::Shape` - represents static shape. -`ngraph::PartialShape` - represents dynamic shape. It means that rank or some of dimensions are dynamic (undefined). -`ngraph::PartialShape` can be converted to `ngraph::Shape` using the `get_shape()` method if all dimensions are static; otherwise, conversion raises an exception. - -@snippet example_ngraph_utils.cpp ngraph:shape - -But in most cases before getting static shape using `get_shape()` method, you need to check that shape is static. - -Also if your transformation requires only input shape rank or particular dimension value, please do not use the `get_shape()` method. See the example below demonstrating how to avoid using `get_shape()` - -@snippet example_ngraph_utils.cpp ngraph:shape_check - -Not using `get_shape()` method makes your transformation more flexible and applicable for more cases. - -###3. 
Friendly Names - -Each `ngraph::Node` has a unique name (used for nGraph internals) and a friendly name. In transformations we care only about friendly name because it represents the name from intermediate representation (IR). -Also friendly name is used as output tensor name (until we do not have other way to represent output tensor name) and user code that requests intermediate outputs based on these names. -To avoid losing friendly name when replacing node with other node or subgraph, set the original friendly name to the latest node in replacing subgraph. See the example below. - -```cpp -// Replace Div operation with Power and Multiply sub-graph and set original friendly name to Multiply operation -auto pow = std::make_shared(div->input(1).get_source_output(), - op::Constant::create(div->get_input_element_type(1), Shape{1}, {-1})); -auto mul = std::make_shared(div->input(0).get_source_output(), pow); -mul->set_friendly_name(div->get_friendly_name()); -ngraph::replace_node(div, mul); -``` - -In more advanced cases, when replaced operation has several outputs and we add additional consumers to its outputs, we make a decision how to set friendly name by arrangement. - -###4. Runtime Info - -Runtime info is a map `std::map` located inside `ngraph::Node` class. It represents additional attributes in `ngraph::Node`. -These attributes can be set by users or by plugins and when executing transformation that changes `ngraph::Function` we need to preserve these attributes as they will not be automatically propagated. -In most cases, transformations have the following types: 1:1 (replace node with another node), 1:N (replace node with a sub-graph), N:1 (fuse sub-graph into a single node), N:M (any other transformation). -Currently, there is no mechanism that automatically detects transformation types, so we need to propagate this runtime information manually. See the examples below. 
- -```cpp -// Replace Transpose with Reshape operation (1:1) -ngraph::copy_runtime_info(transpose, reshape); -``` - -```cpp -// Replace Div operation with Power and Multiply sub-graph (1:N) -ngraph::copy_runtime_info(div, {pow, mul}); -``` - -```cpp -// Fuse Convolution with Add operation (N:1) -ngraph::copy_runtime_info({conv, bias}, {conv_ie}); -``` - -```cpp -// Any other transformation that replaces one sub-graph with another sub-graph (N:M) -ngraph::copy_runtime_info({a, b, c}, {e, f}); -``` - -When transformation has multiple fusions or decompositions, `ngraph::copy_runtime_info` must be called multiple times for each case. - -> **Note**: copy_runtime_info removes rt_info from destination nodes. If you want to keep it, you need to specify them in source nodes like this: copy_runtime_info({a, b, c}, {a, b}) - -###5. Constant Folding - -If your transformation inserts constant sub-graphs that need to be folded, do not forget to use `ngraph::pass::ConstantFolding()` after your transformation or call constant folding directly for operation. -The example below shows how constant subgraph can be constructed. - -```cpp -// After ConstantFolding pass Power will be replaced with Constant -auto pow = std::make_shared( - opset3::Constant::create(element::f32, Shape{1}, {2}) - opset3::Constant::create(element::f32, Shape{1}, {3})); -auto mul = std::make_shared(input /* not constant input */, pow); -``` - -Manual constant folding is more preferable than `ngraph::pass::ConstantFolding()` because it is much faster. - -Below you can find an example of manual constant folding: - -@snippet src/transformations/template_pattern_transformation.cpp manual_constant_folding - -## Common mistakes in transformations - -In transformation development process: - -* Do not use deprecated nGraph API. Deprecated methods has the `NGRAPH_DEPRECATED` macros in its definition. -* Do not pass `shared_ptr` as an input for other node if type of node is unknown or it has multiple outputs. 
Use explicit output port. -* If you replace node with another node that produces different shape, remember that new shape will not be propagated until the first `validate_nodes_and_infer_types` call for `ngraph::Function`. If you are using `pass::Manager`, it will automatically call this method after each transformation execution. -* Do not forget to call the `ngraph::ConstantFolding` pass if your transformation creates constant subgraphs. -* Use latest OpSet if you are not developing downgrade transformation pass. -* When developing a callback for `ngraph::pass::MatcherPass`, do not change nodes that come after the root node in topological order. - -## Using pass manager - -`ngraph::pass::Manager` is a container class that can store the list of transformations and execute them. The main idea of this class is to have high-level representation for grouped list of transformations. -It can register and apply any [transformation types](#transformations_types) on function. -In addition, `ngraph::pass::Manager` has extended debug capabilities (find more information in the [how to debug transformations](#how_to_debug_transformations) section). - -The example below shows basic usage of `ngraph::pass::Manager` - -@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:manager3 - -Another example shows how multiple matcher passes can be united into single GraphRewrite. - -@snippet src/transformations/template_pattern_transformation.cpp matcher_pass:manager2 - -> **NOTE**: nGraph used to have the `pass::PassConfig` class for transformation pipeline manipulation. -This mechanism is now obsolete and the `pass::PassConfig` class will be removed in future release. - -## How to debug transformations - -The most popular tool for transformations debugging is the `ngraph::pass::VisualizeTree` transformation, which visualizes ngraph::Function. 
- -Usage example: - -@snippet example_ngraph_utils.cpp ov:visualize - -`ngraph::pass::VisualizeTree` can be parametrized via environment variables: - -``` -OV_VISUALIZE_TREE_OUTPUT_SHAPES=1 - visualize shapes -OV_VISUALIZE_TREE_OUTPUT_TYPES=1 - visualize types -OV_VISUALIZE_TREE_MIN_MAX_DENORMAL=1 - pretty denormal values -OV_VISUALIZE_TREE_RUNTIME_INFO=1 - print runtime information -OV_VISUALIZE_TREE_IO=1 - print I/O ports -OV_VISUALIZE_TREE_MEMBERS_NAME=1 - print member names -``` - -> **Note**: current VisualTree does not have user-friendly interface and it will be changed in the nearest future. The intention is to move visualization abilities inside transformations. - -If you are using `ngraph::pass::Manager` to run sequence of transformations, you can get additional debug capabilities by using the following environment variables: - -``` -OV_PROFILE_PASS_ENABLE=1 - enables performance measurement for each transformation and prints execution status -OV_ENABLE_VISUALIZE_TRACING=1 - enables visualization after each transformation. By default, it saves dot and svg files. -``` - -> **Note**: Make sure that you have dot installed on your machine; otherwise, it will silently save only dot file without svg file. - -## Disabling/Enabling specific transformations for plugin X - -In transformation library, we provide plugins transformations like CommonOptimizations, which contains predefined sequence of transformations. -We also provide a tool that helps to disable or partially disable particular transformations in a transformation pipeline. -For example, if a plugin uses the CommonOptimization transformation and needs to disable the ConvertGELU transformation, then inside the plugin we have to take the PassConfig instance -from pass::Manger and call disable method. 
- -@snippet example_ngraph_utils.cpp ngraph:disable_gelu - -In some cases, we need to disable transformation for some condition: - -@snippet example_ngraph_utils.cpp ngraph:disable_callback - -In some cases, pass::Manager pipelines inside transformations may have transformations disabled by default but enabled inside plugins. - -@snippet example_ngraph_utils.cpp ngraph:disabled_by_default - -PassConfig instance taken from pass::Manager is shared across all registered transformations including nested transformations. So it does not matter where we work with this object (before passes registration or after). - -## Transformations testing - -If you are developing new transformation inside plugin, you need to add test into the `template_plugin/tests/functional/transformations` folder. -We have two types of tests: nGraph reader tests located in `src/tests/functional/inference_engine/ngraph_reader` and transformation tests located in `src/tests/functional/inference_engine/transformations` -Reader tests are IR based and test end-to-end conversion from IR to CNNNetwork. Transformation tests test single ngraph transformations or low-level functions that are used inside transformations. 
- -The basic transformation test looks like this: - -@snippet tests/functional/transformations/template_transformations_test.cpp transformation:test - - -[ngraph_replace_node]: ./img/ngraph_replace_node.png -[ngraph_insert_node]: ./img/ngraph_insert_node.png -[transformations_structure]: ./img/transformations_structure.png -[register_new_node]: ./img/register_new_node.png -[graph_rewrite_execution]: ./img/graph_rewrite_execution.png -[graph_rewrite_efficient_search]: ./img/graph_rewrite_efficient_search.png diff --git a/docs/OV_Runtime_UG/network_state_intro.md b/docs/OV_Runtime_UG/network_state_intro.md index 2a04dd05dc9..1721f6145e6 100644 --- a/docs/OV_Runtime_UG/network_state_intro.md +++ b/docs/OV_Runtime_UG/network_state_intro.md @@ -1,4 +1,4 @@ -Introduction to OpenVINO state API {#openvino_docs_IE_DG_network_state_intro} +Stateful models {#openvino_docs_IE_DG_network_state_intro} ============================== This section describes how to work with stateful networks in OpenVINO toolkit, specifically: @@ -15,7 +15,7 @@ The section additionally provides small examples of stateful network and code to between data portions should be addressed. For that, networks save some data between inferences - state. When one dependent sequence is over, state should be reset to initial value and new sequence can be started. - Several frameworks have special API for states in networks. For example, Keras have special option for RNNs `stateful` that turns on saving state + Several frameworks have special API for states in networks. For example, Keras has special option for RNNs `stateful` that turns on saving state between inferences. Kaldi contains special specifier `Offset` to define time offset in a network. OpenVINO also contains special API to simplify work with networks with states. 
State is automatically saved between inferences, @@ -196,9 +196,7 @@ sink from `ngraph::Function` after deleting the node from graph with the `delete Let's take an IR from the previous section example. The example below demonstrates inference of two independent sequences of data. State should be reset between these sequences. -One infer request and one thread -will be used in this example. Using several threads is possible if you have several independent sequences. Then each sequence can be processed in its own infer -request. Inference of one sequence in several infer requests is not recommended. In one infer request state will be saved automatically between inferences, but +One infer request and one thread will be used in this example. Using several threads is possible if you have several independent sequences. Then each sequence can be processed in its own infer request. Inference of one sequence in several infer requests is not recommended. In one infer request state will be saved automatically between inferences, but if the first step is done in one infer request and the second in another, state should be set in new infer request manually (using `IVariableState::SetState` method). @snippet openvino/docs/snippets/InferenceEngine_network_with_state_infer.cpp part1 @@ -213,7 +211,7 @@ Decsriptions can be found in [Samples Overview](./Samples_Overview.md) If the original framework does not have a special API for working with states, after importing the model, OpenVINO representation will not contain Assign/ReadValue layers. For example, if the original ONNX model contains RNN operations, IR will contain TensorIterator operations and the values will be obtained only after execution of the whole TensorIterator primitive. Intermediate values from each iteration will not be available. 
To enable you to work with these intermediate values of each iteration and receive them with a low latency after each infer request, special LowLatency and LowLatency2 transformations were introduced. -### How to get TensorIterator/Loop operaions from different frameworks via ModelOptimizer. +### How to get TensorIterator/Loop operations from different frameworks via ModelOptimizer. **ONNX and frameworks supported via ONNX format:** *LSTM, RNN, GRU* original layers are converted to the TensorIterator operation. TensorIterator body contains LSTM/RNN/GRU Cell. Peepholes, InputForget modifications are not supported, sequence_lengths optional input is supported. *ONNX Loop* layer is converted to the OpenVINO Loop operation. @@ -245,7 +243,7 @@ After applying the transformation, ReadValue operations can receive other operat 1. Get CNNNetwork. Either way is acceptable: - * [from IR or ONNX model](./Integrate_with_customer_application_new_API.md) + * [from IR or ONNX model](./integrate_with_your_application.md) * [from ov::Model](../OV_Runtime_UG/model_representation.md) 2. Change the number of iterations inside TensorIterator/Loop nodes in the network using the [Reshape](ShapeInference.md) feature. @@ -349,7 +347,7 @@ After applying the transformation, ReadValue operations can receive other operat 1. Get CNNNetwork. Either way is acceptable: - * [from IR or ONNX model](./Integrate_with_customer_application_new_API.md) + * [from IR or ONNX model](./integrate_with_your_application.md) * [from ov::Model](../OV_Runtime_UG/model_representation.md) 2. [Reshape](ShapeInference.md) the CNNNetwork network if necessary. **Necessary case:** where the sequence_lengths dimension of input > 1, it means TensorIterator layer will have number_iterations > 1. We should reshape the inputs of the network to set sequence_dimension to exactly 1. 
diff --git a/docs/OV_Runtime_UG/openvino_intro.md b/docs/OV_Runtime_UG/openvino_intro.md new file mode 100644 index 00000000000..e5864a5f9d6 --- /dev/null +++ b/docs/OV_Runtime_UG/openvino_intro.md @@ -0,0 +1,51 @@ +# OpenVINO™ Runtime User Guide {#openvino_docs_OV_Runtime_User_Guide} + +@sphinxdirective + +.. _deep learning inference engine: + +.. toctree:: + :maxdepth: 1 + :hidden: + + openvino_docs_Integrate_OV_with_your_application + openvino_docs_IE_DG_ShapeInference + openvino_docs_OV_UG_Working_with_devices + openvino_docs_OV_Runtime_UG_Preprocessing_Overview + openvino_docs_OV_UG_DynamicShapes + openvino_docs_IE_DG_supported_plugins_AUTO + openvino_docs_OV_UG_Running_on_multiple_devices + openvino_docs_OV_UG_Hetero_execution + openvino_docs_OV_UG_Performance_Hints + openvino_docs_OV_UG_Automatic_Batching + openvino_docs_IE_DG_network_state_intro + openvino_docs_OV_Runtime_UG_Python_API_exclusives + openvino_2_0_transition_guide + +@endsphinxdirective + +## Introduction +OpenVINO Runtime is a set of C++ libraries with C and Python bindings providing a common API to deliver inference solutions on the platform of your choice. Use the OpenVINO Runtime API to read an Intermediate Representation (IR), ONNX, or PaddlePaddle model and execute it on preferred devices. + +OpenVINO Runtime uses a plugin architecture. Its plugins are software components that contain complete implementation for inference on a particular Intel® hardware device: CPU, GPU, VPU, etc. Each plugin implements the unified API and provides additional hardware-specific APIs, for configuring devices, or API interoperability between OpenVINO Runtime and underlying plugin backend. + +The scheme below illustrates the typical workflow for deploying a trained deep learning model: + + +![](img/BASIC_FLOW_IE_C.svg) + + +## Video + +@sphinxdirective + +.. list-table:: + + * - .. raw:: html + + + * - **Inference Engine Concept**. 
Duration: 3:43 + +@endsphinxdirective diff --git a/docs/OV_Runtime_UG/ov_dynamic_shapes.md b/docs/OV_Runtime_UG/ov_dynamic_shapes.md new file mode 100644 index 00000000000..0208c2fc974 --- /dev/null +++ b/docs/OV_Runtime_UG/ov_dynamic_shapes.md @@ -0,0 +1,216 @@ +# Dynamic Shapes {#openvino_docs_OV_UG_DynamicShapes} + +@sphinxdirective + +.. toctree:: + :maxdepth: 1 + :hidden: + + openvino_docs_OV_UG_NoDynamicShapes + +@endsphinxdirective + +As it was demonstrated in the [Changing Input Shapes](ShapeInference.md) article, there are models that support changing of input shapes before model compilation in `Core::compile_model`. +Reshaping models provides an ability to customize the model input shape for exactly that size that is required in the end application. +This article explains how the ability of model to reshape can further be leveraged in more dynamic scenarios. + +## When to Apply Dynamic Shapes + +Conventional "static" model reshaping works well when it can be done once per many model inference calls with the same shape. +However, this approach doesn't perform efficiently if the input tensor shape is changed on every inference call: calling `reshape()` and `compile_model()` each time when a new size comes is extremely time-consuming. +A popular example would be an inference of natural language processing models (like BERT) with arbitrarily-sized input sequences that come from the user. +In this case, the sequence length cannot be predicted and may change every time you need to call inference. +Below, such dimensions that can be frequently changed are called *dynamic dimensions*. +When real shape of input is not known at `compile_model` time, that's the case when dynamic shapes should be considered. 
+ +Here are several examples of dimensions that can be naturally dynamic: + - Sequence length dimension for various sequence processing models, like BERT + - Spatial dimensions in segmentation and style transfer models + - Batch dimension + - Arbitrary number of detections in object detection models output + +There are various tricks to address input dynamic dimensions through combining multiple pre-reshaped models and input data padding. +The tricks are sensitive to model internals, do not always give optimal performance and are cumbersome. +A short overview of the methods can be found [here](ov_without_dynamic_shapes.md). +Apply those methods only if native dynamic shape API described in the following sections doesn't work for you or doesn't give desired performance. + +The decision about using dynamic shapes should be based on proper benchmarking of real application with real data. +That's because unlike statically shaped models, inference of dynamically shaped ones takes different inference time depending on input data shape or input tensor content. + +## Dynamic Shapes without Tricks + +This section describes how to handle dynamically shaped models natively with OpenVINO Runtime API version 2022.1 and higher. +There are three main parts in the flow that differ from static shapes: + - configure the model + - prepare data for inference + - read resulting data after inference + +### Configure the Model + +To avoid the tricks mentioned in the previous section there is a way to directly specify one or multiple dimensions in the model inputs to be dynamic. +This is achieved with the same reshape method that is used for alternating static shape of inputs. +Dynamic dimensions are specified as `-1` or `ov::Dimension()` instead of a positive number used for static dimensions: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.cpp + :language: cpp + :fragment: [ov_dynamic_shapes:reshape_undefined] + +.. tab:: Python + + ..
doxygensnippet:: docs/snippets/ov_dynamic_shapes.py + :language: python + :fragment: [reshape_undefined] + +@endsphinxdirective + +To simplify the code, the examples assume that the model has a single input and single output. +However, there are no limitations on the number of inputs and outputs to apply dynamic shapes. + +### Undefined Dimensions "Out Of the Box" + +Dynamic dimensions may appear in the input model without calling reshape. +Many DL frameworks support undefined dimensions. +If such a model is converted with Model Optimizer or read directly by Core::read_model, undefined dimensions are preserved. +Such dimensions automatically treated as dynamic ones. +So you don't need to call reshape if undefined dimensions are already configured in the original model or in the IR file. + +If the input model has undefined dimensions that you are not going to change during the inference, you can set them to static values, using the same `reshape` method of the model. +From the API perspective any combination of dynamic and static dimensions can be configured. + +Model Optimizer provides capability to reshape the model during the conversion, including specifying dynamic dimensions. +Use this capability to save time on calling `reshape` method in the end application. +To get information about setting input shapes using Model Optimizer, refer to [Setting Input Shapes](../MO_DG/prepare_model/convert_model/Converting_Model.md) + +### Dimension Bounds + +Besides marking a dimension just dynamic, you can also specify lower and/or upper bounds that define a range of allowed values for the dimension. +Bounds are coded as arguments for `ov::Dimension`: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.cpp + :language: cpp + :fragment: [ov_dynamic_shapes:reshape_bounds] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_dynamic_shapes.py + :language: python + :fragment: [reshape_bounds] + +@endsphinxdirective + +Information about bounds gives opportunity for the inference plugin to apply additional optimizations. +Using dynamic shapes assumes the plugins apply more loose optimization technique during model compilation. +It may require more time/memory for model compilation and inference. +So providing any additional information like bounds can be beneficial. +For the same reason it is not recommended to leave dimensions as undefined without the real need. + +When specifying bounds, the lower bound is not so important as upper bound, because knowing of upper bound allows inference devices to more precisely allocate memory for intermediate tensors for inference and use lesser number of tuned kernels for different sizes. +Precisely speaking benefits of specifying lower or upper bound are device dependent. +Depending on the plugin specifying upper bounds can be required. +If users know lower and upper bounds for a dimension it is recommended to specify them even when plugin can execute model without the bounds. + +### Setting Input Tensors + +Preparing model with the reshape method was the first step. +The second step is passing a tensor with an appropriate shape to infer request. +This is similar to [regular steps](integrate_with_your_application.md), but now we can pass tensors with different shapes for the same executable model and even for the same inference request: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.cpp + :language: cpp + :fragment: [ov_dynamic_shapes:set_input_tensor] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.py + :language: python + :fragment: [set_input_tensor] + +@endsphinxdirective + +In the example above `set_input_tensor` is used to specify input tensors.
+The real dimensions of the tensor are always static, because it is a concrete tensor and it doesn't have any dimension variations in contrast to model inputs. + +Similar to static shapes, `get_input_tensor` can be used instead of `set_input_tensor`. +In contrast to static input shapes, when using `get_input_tensor` for dynamic inputs, `set_shape` method for the returned tensor should be called to define the shape and allocate memory. +Without doing that, the tensor returned by `get_input_tensor` is an empty tensor, its shape is not initialized and memory is not allocated, because infer request doesn't have information about real shape you are going to feed. +Setting shape for input tensor is required when the corresponding input has at least one dynamic dimension regardless of bounds information. +The following example makes the same sequence of two infer requests as the previous example but using `get_input_tensor` instead of `set_input_tensor`: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.cpp + :language: cpp + :fragment: [ov_dynamic_shapes:get_input_tensor] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.py + :language: python + :fragment: [get_input_tensor] + +@endsphinxdirective + +### Dynamic Shapes in Outputs + +Examples above correctly handle the case when dynamic dimensions in the output may be implied by propagating a dynamic dimension from the inputs. +For example, batch dimension in input shape is usually propagated through the whole model and appears in the output shape. +The same is true for other dimensions, like sequence length for NLP models or spatial dimensions for segmentation models, that are propagated through the entire network. + +Whether or not output has dynamic dimensions can be examined by querying output partial shape after model read or reshape. +The same is applicable for inputs. For example: + +@sphinxdirective + +.. tab:: C++ + + ..
doxygensnippet:: docs/snippets/ov_dynamic_shapes.cpp + :language: cpp + :fragment: [ov_dynamic_shapes:print_dynamic] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.py + :language: python + :fragment: [print_dynamic] + +@endsphinxdirective + +Appearing `?` or ranges like `1..10` means there are dynamic dimensions in corresponding inputs or outputs. + +Or more programmatically: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.cpp + :language: cpp + :fragment: [ov_dynamic_shapes:detect_dynamic] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_dynamic_shapes.py + :language: python + :fragment: [detect_dynamic] + +@endsphinxdirective + +If at least one dynamic dimension exists in output of the model, shape of the corresponding output tensor will be set as the result of inference call. +Before the first inference, memory for such a tensor is not allocated and has shape `[0]`. +If user call `set_output_tensor` with pre-allocated tensor, the inference will call `set_shape` internally, and the initial shape is replaced by the really calculated shape. +So setting shape for output tensors in this case is useful only if you want to pre-allocate enough memory for output tensor, because `Tensor`'s `set_shape` method will re-allocate memory only if new shape requires more storage. diff --git a/docs/OV_Runtime_UG/ov_infer_request.md b/docs/OV_Runtime_UG/ov_infer_request.md new file mode 100644 index 00000000000..6b93392661b --- /dev/null +++ b/docs/OV_Runtime_UG/ov_infer_request.md @@ -0,0 +1,279 @@ +# OpenVINO™ Inference Request {#openvino_docs_OV_Runtime_UG_Infer_request} + +OpenVINO™ Runtime uses Infer Request mechanism which allows to run models on different devices in asynchronous or synchronous manners. +`ov::InferRequest` class is used for this purpose inside the OpenVINO™ Runtime. +This class allows to set and get data for model inputs, outputs and run inference for the model. 
+ +## Creating Infer Request + +`ov::InferRequest` can be created from the `ov::CompiledModel`: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [create_infer_request] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [create_infer_request] + +@endsphinxdirective + +## Run inference + +`ov::InferRequest` supports synchronous and asynchronous modes for inference. + +### Synchronous mode + +You can use `ov::InferRequest::infer`, which blocks the application execution, to infer model in the synchronous mode: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [sync_infer] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [sync_infer] + +@endsphinxdirective + +### Asynchronous mode + +Asynchronous mode can improve application's overall frame-rate, because rather than wait for inference to complete, the app can keep working on the host, while the accelerator is busy. You can use `ov::InferRequest::start_async` to infer model in the asynchronous mode: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [async_infer] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [async_infer] + +@endsphinxdirective + +Asynchronous mode supports two ways the application waits for inference results: + * `ov::InferRequest::wait_for` - specifies the maximum duration in milliseconds to block the method. The method is blocked until the specified time has passed, or the result becomes available, whichever comes first. + @sphinxdirective + + .. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [wait_for] + + .. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [wait_for] + + @endsphinxdirective + * `ov::InferRequest::wait` - waits until inference result becomes available + @sphinxdirective + + .. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [wait] + + .. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [wait] + + @endsphinxdirective + +Both methods are thread-safe. + +When you are running several inference requests in parallel, a device can process them simultaneously, with no guarantees on the completion order. This may complicate a possible logic based on the `ov::InferRequest::wait` (unless your code needs to wait for _all_ the requests). For multi-request scenarios, consider using the `ov::InferRequest::set_callback` method to set a callback which is called upon completion of the request: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [set_callback] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [set_callback] + +@endsphinxdirective + +> **NOTE**: Use weak reference of infer_request (`ov::InferRequest*`, `ov::InferRequest&`, `std::weak_ptr`, etc.) in the callback. It is necessary to avoid cyclic references. +For more details, check [Classification Sample Async](../../samples/cpp/classification_sample_async/README.md). + +You can use the `ov::InferRequest::cancel` method if you want to abort execution of the current inference request: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [cancel] + +.. tab:: Python + + ..
doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [cancel] + +@endsphinxdirective + +## Working with Input and Output tensors + +`ov::InferRequest` allows to get input/output tensors by tensor name, index, port and without any arguments in case if model has only one input or output. + + * `ov::InferRequest::get_input_tensor`, `ov::InferRequest::set_input_tensor`, `ov::InferRequest::get_output_tensor`, `ov::InferRequest::set_output_tensor` methods without arguments can be used to get or set input/output tensor for model with only one input/output: + @sphinxdirective + + .. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [get_set_one_tensor] + + .. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [get_set_one_tensor] + + @endsphinxdirective + + * `ov::InferRequest::get_input_tensor`, `ov::InferRequest::set_input_tensor`, `ov::InferRequest::get_output_tensor`, `ov::InferRequest::set_output_tensor` methods with argument can be used to get or set input/output tensor by input/output index: + @sphinxdirective + + .. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [get_set_index_tensor] + + .. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [get_set_index_tensor] + + @endsphinxdirective + + * `ov::InferRequest::get_tensor`, `ov::InferRequest::set_tensor` methods can be used to get or set input/output tensor by tensor name: + @sphinxdirective + + .. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [get_set_tensor] + + .. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [get_set_tensor] + + @endsphinxdirective + + * `ov::InferRequest::get_tensor`, `ov::InferRequest::set_tensor` methods can be used to get or set input/output tensor by port: + @sphinxdirective + + .. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [get_set_tensor_by_port] + + .. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [get_set_tensor_by_port] + + @endsphinxdirective + +## Examples of InferRequest usages + +### Cascade of models + +`ov::InferRequest` can be used to organize cascade of models. You need to have infer requests for each model. +In this case you can get output tensor from the first request using `ov::InferRequest::get_tensor` and set it as input for the second request using `ov::InferRequest::set_tensor`. But be careful, shared tensors across compiled models can be rewritten by the first model if the first infer request is run once again, while the second model has not started yet. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [cascade_models] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [cascade_models] + +@endsphinxdirective + +### Using of ROI tensors + +It is possible to re-use shared input by several models. You do not need to allocate separate input tensor for a model if it processes a ROI object located inside of already allocated input of a previous model. For instance, when the first model detects objects in a video frame (stored as input tensor) and the second model accepts detected bounding boxes (ROI inside of the frame) as input. 
In this case, it is allowed to re-use pre-allocated input tensor (used by the first model) by the second model and just crop ROI without allocation of new memory using `ov::Tensor` with passing of `ov::Tensor` and `ov::Coordinate` as parameters. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [roi_tensor] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [roi_tensor] + +@endsphinxdirective + +### Using of remote tensors + +You can create a remote tensor to work with remote device memory. `ov::RemoteContext` allows to create remote tensor. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_infer_request.cpp + :language: cpp + :fragment: [remote_tensor] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_infer_request.py + :language: python + :fragment: [remote_tensor] + +@endsphinxdirective diff --git a/docs/OV_Runtime_UG/ov_without_dynamic_shapes.md b/docs/OV_Runtime_UG/ov_without_dynamic_shapes.md new file mode 100644 index 00000000000..8e07d1b7821 --- /dev/null +++ b/docs/OV_Runtime_UG/ov_without_dynamic_shapes.md @@ -0,0 +1,44 @@ +# When Dynamic Shapes API is Not Applicable {#openvino_docs_OV_UG_NoDynamicShapes} + +Several approaches to emulate dynamic shapes are considered in this chapter +Apply these methods only if [native dynamic shape API](ov_dynamic_shapes.md) doesn't work for you or doesn't give desired performance. + +## Padding + +The model can be designed in a way that supports partially filled tensors. +For the BERT model you can use a special input to the model to mask unused elements out. +So, the model can be reshaped for some predefined big sequence length once and compiled once, and then the input tensors are used only partially with mask specifying valid tokens. +This approach is called *padding*. + +However, padding is not applicable to every model and every use case. 
+You should be aware of model internals to apply padding. Otherwise, if the model is not designed to handle dummy element gracefully in padding area, +then the results of inference may be totally scrambled, +or accuracy is significantly affected. +Model can even crash during inference. + +Besides the bad developer experience, +the main disadvantage of padding is a bad performance due to spending time for processing dummy elements in the padding area, +even if the model is properly designed to be used with padding. +It turns out that usually such models are designed in a way where calculations in the padded area still happen not affecting the end result. + +## Multiple Precompiled Models + +Another approach to handle arbitrary sized inputs is to pre-compile several models reshaped for different input shapes. +This method works well if the number of different shapes is small enough to afford increased time for multiple reshapes and compilations +as well as increased amount of consumed memory. +As this method cannot be scaled well it is used in combination with the padding: +model with the most suitable input shape among pre-reshaped models is chosen. +It gives smaller pad area in comparison to a single model. + +## Dimension Partitioning + +Another practical but still a complicated approach is when the input tensor can be divided into multiple chunks along the dynamic dimension. +For example, if we have a batch of independent inputs as a single tensor. +If arbitrary division along batch dimension is possible - and for batch dimension it should be possible by the dimension purpose - +you can run multiple inferences using the approach with several pre-compiled models choosing sized to have the minimal number of inferences +having a particular batch size in the input tensor. + +For example, if there are models pre-compiled for batch sizes 1, 2, 4 and 8, +the input tensor with batch 5 can be processed with two inference calls with batch size 1 and 4. 
+(Here it's assumed the batch processing is required for performance reasons, otherwise you can just loop over images in a batch, +and process image by image with a single compiled model.) diff --git a/docs/OV_Runtime_UG/performance_hints.md b/docs/OV_Runtime_UG/performance_hints.md new file mode 100644 index 00000000000..5e81921854b --- /dev/null +++ b/docs/OV_Runtime_UG/performance_hints.md @@ -0,0 +1,138 @@ +# High-level Performance Hints {#openvino_docs_OV_UG_Performance_Hints} + +Each of the OpenVINO's [supported devices](supported_plugins/Supported_Devices.md) offers low-level performance settings. Tweaking this detailed configuration requires deep architecture understanding. +Also, while the performance may be optimal for the specific combination of the device and the inferred model, the resulting configuration is not necessarily optimal for another device or model. +The OpenVINO performance hints are the new way to configure the performance with the _portability_ in mind. + +The hints also "reverse" the direction of the configuration in the right fashion: rather than map the application needs to the low-level performance settings, and keep an associated application logic to configure each possible device separately, the idea is to express a target scenario with a single config key and let the *device* to configure itself in response. +As the hints are supported by every OpenVINO device, this is completely portable and future-proof solution. + +Previously, certain level of automatic configuration was coming from the _default_ values of the parameters. For example, number of the CPU streams was deduced from the number of CPU cores, when the `ov::streams::AUTO` (`CPU_THROUGHPUT_AUTO` in the pre-OpenVINO 2.0 parlance) is set. However, the resulting number of streams didn't account for actual compute requirements of the model to be inferred. 
+The hints, in contrast, respect the actual model, so the parameters for the optimal throughput are calculated for each model individually (based on its compute versus memory bandwidth requirements and capabilities of the device). + +## Performance Hints: Latency and Throughput +As discussed in the [Optimization Guide](../optimization_guide/dldt_optimization_guide.md) there are a few different metrics associated with the inference speed. +Throughput and latency are some of the most critical factors that influence the overall performance of an application. + +This is why, to ease the configuration of the device, OpenVINO already offers two dedicated hints, namely `ov::hint::PerformanceMode::THROUGHPUT` and `ov::hint::PerformanceMode::LATENCY`. +Every OpenVINO device supports these, which makes things portable and future-proof. +This also allows performing a performance configuration that is fully compatible with the [automatic device selection](./auto_device_selection.md). +A special `ov::hint::PerformanceMode::UNDEFINED` acts the same as specifying no hint. + +Please also see the last section in the document on conducting the performance measurements with the `benchmark_app`. + +Notice that if other performance factors (other than inference time), like memory footprint and model load/compilation time, are of concern, a typical model may take significantly more time to load with `ov::hint::PerformanceMode::THROUGHPUT` and then consume much more memory, compared to the `ov::hint::PerformanceMode::LATENCY`. + +## Performance Hints: How It Works? +Internally, every device "translates" the value of the hint to the actual performance settings. +For example, the `ov::hint::PerformanceMode::THROUGHPUT` selects the number of CPU or GPU streams.
+For the GPU, additionally the optimal batch size is selected and the [automatic batching](../OV_Runtime_UG/automatic_batching.md) is applied whenever possible (and also if the device supports that [refer to the devices/features support matrix](./supported_plugins/Device_Plugins.md)). + +The resulting (device-specific) settings can be queried back from the instance of the `ov:Compiled_Model`. +Notice that the `benchmark_app`, outputs the actual settings for the THROUGHPUT hint, please the bottom of the output example: + + ``` + $benchmark_app -hint tput -d CPU -m 'path to your favorite model' + ... + [Step 8/11] Setting optimal runtime parameters + [ INFO ] Device: CPU + [ INFO ] { PERFORMANCE_HINT , THROUGHPUT } + ... + [ INFO ] { OPTIMAL_NUMBER_OF_INFER_REQUESTS , 4 } + [ INFO ] { NUM_STREAMS , 4 } + ... + ``` + +## Using the Performance Hints: Basic API +In the example code-snippet below the `ov::hint::PerformanceMode::THROUGHPUT` is specified for the `ov::hint::performance_mode` property for the compile_model: +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_auto_batching.cpp + :language: cpp + :fragment: [compile_model] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_auto_batching.py + :language: python + :fragment: [compile_model] + +@endsphinxdirective + +## Additional (Optional) Hints from the App +Let's take an example of an application that processes 4 video streams. The most future-proof way to communicate the limitation of the parallel slack is to equip the performance hint with the optional `ov::hint::num_requests` configuration key set to 4. +As discussed previosly, for the GPU this will limit the batch size, for the CPU - the number of inference streams, so each device uses the `ov::hint::num_requests` while converting the hint to the actual device configuration options: +@sphinxdirective + +.. tab:: C++ + + .. 
doxygensnippet:: docs/snippets/ov_auto_batching.cpp + :language: cpp + :fragment: [hint_num_requests] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_auto_batching.py + :language: python + :fragment: [hint_num_requests] + +@endsphinxdirective + +## Optimal Number of Inference Requests +Using the hints assumes that the application queries the `ov::optimal_number_of_infer_requests` to create and run the returned number of requests simultaneously: +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_auto_batching.cpp + :language: cpp + :fragment: [query_optimal_num_requests] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_auto_batching.py + :language: python + :fragment: [query_optimal_num_requests] + +@endsphinxdirective + +While an application is free to create more requests if needed (for example to support asynchronous inputs population) **it is very important to at least run the `ov::optimal_number_of_infer_requests` of the inference requests in parallel**, for efficiency (device utilization) reasons. + +Also, notice that `ov::hint::PerformanceMode::LATENCY` does not necessarily imply using single inference request. For example, multi-socket CPUs can deliver as high number of requests (at the same minimal latency) as there are NUMA nodes the machine features. +To make your application fully scalable, prefer to query the `ov::optimal_number_of_infer_requests` directly. + +## Prefer Async API +The API of the inference requests offers Sync and Async execution. While the `ov::InferRequest::infer()` is inherently synchronous and simple to operate (as it serializes the execution flow in the current application thread), the Async "splits" the `infer()` into `ov::InferRequest::start_async()` and use of the `ov::InferRequest::wait()` (or callbacks). Please consider the [API examples](../OV_Runtime_UG/ov_infer_request.md). 
+ Although the Synchronous API can be somewhat easier to start with, in the production code always prefer to use the Asynchronous (callbacks-based) API, as it is the most general and scalable way to implement the flow control for any possible number of requests (and hence both latency and throughput scenarios). + +## Combining the Hints and Individual Low-Level Settings +While sacrificing the portability to some extent, it is possible to combine the hints with individual device-specific settings. +For example, you can let the device prepare a configuration `ov::hint::PerformanceMode::THROUGHPUT` while overriding any specific value: +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_auto_batching.cpp + :language: cpp + :fragment: [hint_plus_low_level] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_auto_batching.py + :language: python + :fragment: [hint_plus_low_level] + + +@endsphinxdirective +## Testing the Performance of The Hints with the Benchmark_App +The `benchmark_app`, which exists in both [C++](../../samples/cpp/benchmark_app/README.md) and [Python](../../tools/benchmark_tool/README.md) versions, is the best way to evaluate the performance of the performance hints for a particular device: + - benchmark_app **-hint tput** -d 'device' -m 'path to your model' + - benchmark_app **-hint latency** -d 'device' -m 'path to your model' +- Disabling the hints to emulate the pre-hints era (highly recommended before trying the individual low-level settings, such as the number of streams as below, threads, etc): +- - benchmark_app **-hint none -nstreams 1** -d 'device' -m 'path to your model' + + +### See Also +[Supported Devices](./supported_plugins/Supported_Devices.md) \ No newline at end of file diff --git a/docs/OV_Runtime_UG/preprocessing_details.md b/docs/OV_Runtime_UG/preprocessing_details.md new file mode 100644 index 00000000000..b7fa4e97161 --- /dev/null +++ b/docs/OV_Runtime_UG/preprocessing_details.md @@ -0,0 +1,346 @@ 
+# Preprocessing API - details {#openvino_docs_OV_Runtime_UG_Preprocessing_Details} + +## Preprocessing capabilities + +### Addressing particular input/output + +If your model has only one input, then simple ov::preprocess::PrePostProcessor::input() will get a reference to preprocessing builder for this input (tensor, steps, model): + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:input_1] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:input_1] + +@endsphinxdirective + +In general, when model has multiple inputs/outputs, each one can be addressed by tensor name + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:input_name] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:input_name] + +@endsphinxdirective + + +Or by it's index + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:input_index] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:input_index] + +@endsphinxdirective + +C++ references: + * ov::preprocess::InputTensorInfo + * ov::preprocess::OutputTensorInfo + * ov::preprocess::PrePostProcessor + + +### Supported preprocessing operations + +C++ references: +* ov::preprocess::PreProcessSteps + +#### Mean/Scale normalization + +Typical data normalization includes 2 operations for each data item: subtract mean value and divide to standard deviation. This can be done with the following code: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:mean_scale] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:mean_scale] + +@endsphinxdirective + + +In Computer Vision area normalization is usually done separately for R, G, B values. To do this, [layout with 'C' dimension](./layout_overview.md) shall be defined. Example: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:mean_scale_array] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:mean_scale_array] + +@endsphinxdirective + +C++ references: +* ov::preprocess::PreProcessSteps::mean() +* ov::preprocess::PreProcessSteps::scale() + + +#### Convert precision + +In Computer Vision, an image is represented by an array of unsigned 8-bit integer values (for each color), but the model accepts floating point tensors + +To integrate precision conversion into execution graph as a preprocessing step, just do: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:convert_element_type] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:convert_element_type] + +@endsphinxdirective + +C++ references: + * ov::preprocess::InputTensorInfo::set_element_type() + * ov::preprocess::PreProcessSteps::convert_element_type() + + +#### Convert layout (transpose) + +Transposing of matrices/tensors is a typical operation in Deep Learning - you may have a BMP image 640x480 which is an array of `{480, 640, 3}` elements, but Deep Learning model can require input with shape `{1, 3, 480, 640}` + +Using [layout](./layout_overview.md) of user's tensor and layout of original model conversion can be done implicitly + +@sphinxdirective +.. tab:: C++ + + .. 
doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:convert_layout] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:convert_layout] + +@endsphinxdirective + + +Or if you prefer manual transpose of axes without usage of [layout](./layout_overview.md) in your code, just do: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:convert_layout_2] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:convert_layout_2] + +@endsphinxdirective + +It performs the same transpose, but we believe that approach using source and destination layout can be easier to read and understand + +C++ references: + * ov::preprocess::PreProcessSteps::convert_layout() + * ov::preprocess::InputTensorInfo::set_layout() + * ov::preprocess::InputModelInfo::set_layout() + * ov::Layout + +#### Resize image + +Resizing of image is a typical preprocessing step for computer vision tasks. With preprocessing API this step can also be integrated into execution graph and performed on target device. + +To resize the input image, it is needed to define `H` and `W` dimensions of [layout](./layout_overview.md) + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:resize_1] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:resize_1] + +@endsphinxdirective + +Or, if the original model has known spatial dimensions (width+height), target width/height can be omitted + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:resize_2] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:resize_2] + +@endsphinxdirective + +C++ references: +* ov::preprocess::PreProcessSteps::resize() +* ov::preprocess::ResizeAlgorithm + + +#### Color conversion + +Typical use case is to reverse color channels from RGB to BGR and vice versa. To do this, specify source color format in `tensor` section and perform `convert_color` preprocessing operation. In example below, user has `BGR` image and needs to convert it to `RGB` as required for model's input + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:convert_color_1] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:convert_color_1] + +@endsphinxdirective + +#### Color conversion - NV12/I420 +Preprocessing also supports YUV-family source color formats, i.e. NV12 and I420. +In advanced cases such YUV images can be split into separate planes, e.g. for NV12 images Y-component may come from one source and UV-component comes from another source. Concatenating such components in user's application manually is not a perfect solution from performance and device utilization perspectives, so there is a way to use Preprocessing API. For such cases there are `NV12_TWO_PLANES` and `I420_THREE_PLANES` source color formats, which will split original `input` to 2 or 3 inputs + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:convert_color_2] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:convert_color_2] + +@endsphinxdirective + +In this example, original `input` is being split to `input/y` and `input/uv` inputs. You can fill `input/y` from one source, and `input/uv` from another source. 
Color conversion to `RGB` will be performed using these sources, it is more optimal as there will be no additional copies of NV12 buffers. + +C++ references: +* ov::preprocess::ColorFormat +* ov::preprocess::PreProcessSteps::convert_color + + +### Custom operations + +Preprocessing API also allows adding custom preprocessing steps into execution graph. Custom step is a function which accepts current 'input' node and returns new node after adding preprocessing step + +> **Note:** Custom preprocessing function shall only insert node(s) after input, it will be done during model compilation. This function will NOT be called during execution phase. This may look not trivial and require some knowledge of [OpenVINO™ operations](../ops/opset.md) + +If there is a need to insert some additional operations to execution graph right after input, like some specific crops and/or resizes - Preprocessing API can be a good choice to implement this + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:custom] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:custom] + +@endsphinxdirective + +C++ references: +* ov::preprocess::PreProcessSteps::custom() +* [Available Operations Sets](../ops/opset.md) + +## Postprocessing + +Postprocessing steps can be added to model outputs. As for preprocessing, these steps will be also integrated into graph and executed on selected device. 
+ +Preprocessing uses flow **User tensor** -> **Steps** -> **Model input** + +Postprocessing is vice versa: **Model output** -> **Steps** -> **User tensor** + +Compared to preprocessing, there are not so many operations needed in the post-processing stage, so right now only the following postprocessing operations are supported: + - Convert [layout](./layout_overview.md) + - Convert element type + - Custom operations + +Usage of these operations is similar to Preprocessing. An example is shown below: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:postprocess] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:postprocess] + +@endsphinxdirective + +C++ references: +* ov::preprocess::PostProcessSteps +* ov::preprocess::OutputModelInfo +* ov::preprocess::OutputTensorInfo diff --git a/docs/OV_Runtime_UG/preprocessing_overview.md b/docs/OV_Runtime_UG/preprocessing_overview.md new file mode 100644 index 00000000000..4abb9ec1ca2 --- /dev/null +++ b/docs/OV_Runtime_UG/preprocessing_overview.md @@ -0,0 +1,170 @@ +# Overview of Preprocessing API {#openvino_docs_OV_Runtime_UG_Preprocessing_Overview} + +@sphinxdirective + +.. toctree:: + :maxdepth: 1 + :hidden: + + openvino_docs_OV_Runtime_UG_Preprocessing_Details + openvino_docs_OV_Runtime_UG_Layout_Overview + openvino_docs_OV_Runtime_UG_Preprocess_Usecase_save + +@endsphinxdirective + +## Introduction + +When your input data doesn't perfectly fit the Neural Network model input tensor - this means that additional operations/steps are needed to transform your data to format expected by model. These operations are known as "preprocessing". 
+ +### Example +Consider the following standard example: deep learning model expects input with shape `{1, 3, 224, 224}`, `FP32` precision, `RGB` color channels order, and requires data normalization (subtract mean and divide by scale factor). But you have just a `640x480` `BGR` image (data is `{480, 640, 3}`). This means that we need some operations which will: + - Convert U8 buffer to FP32 + - Transform to `planar` format: from `{1, 480, 640, 3}` to `{1, 3, 480, 640}` + - Resize image from 640x480 to 224x224 + - Make `BGR->RGB` conversion as model expects `RGB` + - For each pixel, subtract mean values and divide by scale factor + + +![](img/preprocess_not_fit.png) + + +Even though all these steps can be relatively easy implemented manually in application's code before actual inference, it is possible to do it with Preprocessing API. Reasons to use this API are: + - Preprocessing API is easy to use + - Preprocessing steps will be integrated into execution graph and will be performed on selected device (CPU/GPU/VPU/etc.) rather than always being executed on CPU. This will improve selected device utilization which is always good. + +## Preprocessing API + +Intuitively, Preprocessing API consists of the following parts: + 1. **Tensor:** Declare user's data format, like shape, [layout](./layout_overview.md), precision, color format of actual user's data + 2. **Steps:** Describe sequence of preprocessing steps which need to be applied to user's data + 3. **Model:** Specify Model's data format. Usually, precision and shape are already known for model, only additional information, like [layout](./layout_overview.md) can be specified + +> **Note:** All model's graph modification shall be performed after model is read from disk and **before** it is being loaded on actual device. + +### PrePostProcessor object + +`ov::preprocess::PrePostProcessor` class allows specifying preprocessing and postprocessing steps for model read from disk. + +@sphinxdirective + +.. 
tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:create] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:create] + +@endsphinxdirective + +### Declare user's data format + +To address particular input of model/preprocessor, use `ov::preprocess::PrePostProcessor::input(input_name)` method + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:tensor] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:tensor] + +@endsphinxdirective + + +Here we've specified all information about user's input: + - Precision is U8 (unsigned 8-bit integer) + - Data represents tensor with {1,480,640,3} shape + - [Layout](./layout_overview.md) is "NHWC". It means that 'height=480, width=640, channels=3' + - Color format is `BGR` + +### Declare model's layout + +Model's input already has information about precision and shape. Preprocessing API is not intended to modify this. The only thing that may be specified is input's data [layout](./layout_overview.md) + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:model] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:model] + +@endsphinxdirective + + +Now, if model's input has `{1,3,224,224}` shape, preprocessing will be able to identify that model's `height=224`, `width=224`, `channels=3`. Height/width information is necessary for 'resize', and `channels` is needed for mean/scale normalization + +### Preprocessing steps + +Now we can define sequence of preprocessing steps: + +@sphinxdirective + +.. tab:: C++ + + .. 
doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:steps] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:steps] + +@endsphinxdirective + +Here: + - Convert U8 to FP32 precision + - Convert current color format (BGR) to RGB + - Resize to model's height/width. **Note** that if model accepts dynamic size, e.g. {?, 3, ?, ?}, `resize` will not know how to resize the picture, so in this case you should specify target height/width on this step. See also ov::preprocess::PreProcessSteps::resize() + - Subtract mean from each channel. On this step, color format is RGB already, so `100.5` will be subtracted from each Red component, and `101.5` will be subtracted from `Blue` one. + - Divide each pixel data to appropriate scale value. In this example each `Red` component will be divided by 50, `Green` by 51, `Blue` by 52 respectively + - **Note:** last `convert_layout` step is commented out as it is not necessary to specify last layout conversion. PrePostProcessor will do such conversion automatically + +### Integrate steps into model + +We've finished with preprocessing steps declaration, now it is time to build it. For debugging purposes it is possible to print `PrePostProcessor` configuration on screen: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:build] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:build] + +@endsphinxdirective + + +After this, `model` will accept U8 input with `{1, 480, 640, 3}` shape, with `BGR` channels order. All conversion steps will be integrated into execution graph. 
Now you can load model on device and pass your image to model as is, without any data manipulation on application's side + + +## See Also + +* [Preprocessing Details](./preprocessing_details.md) +* [Layout API overview](./layout_overview.md) +* ov::preprocess::PrePostProcessor C++ class documentation diff --git a/docs/OV_Runtime_UG/preprocessing_usecase_save.md b/docs/OV_Runtime_UG/preprocessing_usecase_save.md new file mode 100644 index 00000000000..b0f5c023cd3 --- /dev/null +++ b/docs/OV_Runtime_UG/preprocessing_usecase_save.md @@ -0,0 +1,83 @@ +# Use Case - Integrate and Save Preprocessing Steps Into IR {#openvino_docs_OV_Runtime_UG_Preprocess_Usecase_save} + +## Introduction + +In previous sections we've covered how to add [preprocessing steps](./preprocessing_details.md) and got the overview of [Layout](./layout_overview.md) API. + +For many applications it is also important to minimize model's read/load time, so performing integration of preprocessing steps every time on application startup after `ov::runtime::Core::read_model` may look not convenient. In such cases, after adding of Pre- and Post-processing steps it can be useful to store new execution model to Intermediate Representation (IR, .xml format). + +Most part of existing preprocessing steps can also be performed via command line options using Model Optimizer tool. Refer to [Model Optimizer - Optimize Preprocessing Computation](../MO_DG/prepare_model/Additional_Optimizations.md) for details os such command line options. + +## Code example - saving model with preprocessing to IR + +In case if you have some preprocessing steps which can't be integrated into execution graph using Model Optimizer command line options (e.g. `YUV->RGB` color space conversion, Resize, etc.) 
it is possible to write simple code which: + - Reads original model (IR, ONNX, Paddle) + - Adds preprocessing/postprocessing steps + - Saves resulting model as IR (.xml/.bin) + +Let's consider the example, there is an original `ONNX` model which takes one `float32` input with shape `{1, 3, 224, 224}` with `RGB` channels order, with mean/scale values applied. User's application can provide `BGR` image buffer with not fixed size. Additionally, we'll also imagine that our application provides input images as batches, each batch contains 2 images. Here is how model conversion code may look like in your model preparation script + +- Includes / Imports +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:save_headers] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:save_headers] + +@endsphinxdirective + +- Preprocessing & Saving to IR code +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:save] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:save] + +@endsphinxdirective + + +## Application code - load model to target device + +After this, your application's code can load saved file and don't perform preprocessing anymore. In this example we'll also enable [model caching](./Model_caching_overview.md) to minimize load time when cached model is available + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_preprocessing.cpp + :language: cpp + :fragment: [ov:preprocess:save_load] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_preprocessing.py + :language: python + :fragment: [ov:preprocess:save_load] + +@endsphinxdirective + + +## See Also +* [Preprocessing Details](./preprocessing_details.md) +* [Layout API overview](./layout_overview.md) +* [Model Optimizer - Optimize Preprocessing Computation](../MO_DG/prepare_model/Additional_Optimizations.md) +* [Model Caching Overview](./Model_caching_overview.md) +* ov::preprocess::PrePostProcessor C++ class documentation +* ov::pass::Serialize - pass to serialize model to XML/BIN +* ov::set_batch - update batch dimension for a given model \ No newline at end of file diff --git a/docs/OV_Runtime_UG/protecting_model_guide.md b/docs/OV_Runtime_UG/protecting_model_guide.md index e710eeeb668..222bdb90ffc 100644 --- a/docs/OV_Runtime_UG/protecting_model_guide.md +++ b/docs/OV_Runtime_UG/protecting_model_guide.md @@ -16,22 +16,22 @@ This guide demonstrates how to use OpenVINO securely with protected models. After a model is optimized by the OpenVINO Model Optimizer, it's deployed to target devices in the Intermediate Representation (IR) format. An optimized -model is stored on an edge device and executed by the Inference Engine. -(ONNX and nGraph models can also be read natively by the Inference Engine.) +model is stored on an edge device and executed by the OpenVINO Runtime. +(ONNX, PDPD models can also be read natively by the OpenVINO Runtime.) To protect deep-learning models, you can encrypt an optimized model before deploying it to the edge device. The edge device should keep the stored model protected at all times and have the model decrypted **in runtime only** for use -by the Inference Engine. +by the OpenVINO Runtime. ![deploy_encrypted_model](img/deploy_encrypted_model.png) ## Loading Encrypted Models -The OpenVINO Inference Engine requires model decryption before loading. Allocate +The OpenVINO Runtime requires model decryption before loading. 
Allocate a temporary memory block for model decryption and use the -`InferenceEngine::Core::ReadNetwork` method to load the model from a memory buffer. -For more information, see the `InferenceEngine::Core` Class Reference Documentation. +`ov::Core::read_model` method to load the model from a memory buffer. +For more information, see the `ov::Core` Class Reference Documentation. @snippet snippets/protecting_model_guide.cpp part0 @@ -40,12 +40,12 @@ Hardware-based protection such as Intel® Software Guard Extensions bind them to a device. For more information, go to [Intel® Software Guard Extensions](https://software.intel.com/en-us/sgx). -Use `InferenceEngine::Core::ReadNetwork()` to set model representations and +Use `ov::Core::read_model` to set model representations and weights respectively. Currently there is no way to read external weights from memory for ONNX models. -The `ReadNetwork(const std::string& model, const Blob::CPtr& weights)` function -should be called with `weights` passed as an empty `Blob`. +The `ov::Core::read_model(const std::string& model, const Tensor& weights)` method +should be called with `weights` passed as an empty `ov::Tensor`. @snippet snippets/protecting_model_guide.cpp part1 @@ -54,7 +54,7 @@ should be called with `weights` passed as an empty `Blob`. 
- Intel® Distribution of OpenVINO™ toolkit home page: [https://software.intel.com/en-us/openvino-toolkit](https://software.intel.com/en-us/openvino-toolkit) - OpenVINO™ toolkit online documentation: [https://docs.openvino.ai](https://docs.openvino.ai) - Model Optimizer Developer Guide: [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) -- [OpenVINO™ runTime User Guide](OpenVINO_Runtime_User_Guide.md) -- For more information on Sample Applications, see the [Inference Engine Samples Overview](Samples_Overview.md) +- [OpenVINO™ runTime User Guide](openvino_intro.md) +- For more information on Sample Applications, see the [OpenVINO Samples Overview](Samples_Overview.md) - For information on a set of pre-trained models, see the [Overview of OpenVINO™ Toolkit Pre-Trained Models](@ref omz_models_group_intel) - For IoT Libraries and Code Samples see the [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit). diff --git a/docs/OV_Runtime_UG/supported_plugins/ARM_CPU.md b/docs/OV_Runtime_UG/supported_plugins/ARM_CPU.md new file mode 100644 index 00000000000..f10bd3c2076 --- /dev/null +++ b/docs/OV_Runtime_UG/supported_plugins/ARM_CPU.md @@ -0,0 +1,91 @@ +# Arm® CPU device {#openvino_docs_OV_UG_supported_plugins_ARM_CPU} + + +## Introducing the Arm® CPU Plugin +The Arm® CPU plugin is developed in order to enable deep neural networks inference on Arm® CPU, using [Compute Library](https://github.com/ARM-software/ComputeLibrary) as a backend. + +> **NOTE**: Note that this is a community-level add-on to OpenVINO™. Intel® welcomes community participation in the OpenVINO™ ecosystem and technical questions on community forums as well as code contributions are welcome. However, this component has not undergone full release validation or qualification from Intel®, and no official support is offered. + +The Arm® CPU plugin is not a part of the Intel® Distribution of OpenVINO™ toolkit and is not distributed in pre-built form. 
To use the plugin, it should be built from source code. The plugin build procedure is described on the page [How to build Arm® CPU plugin](https://github.com/openvinotoolkit/openvino_contrib/wiki/How-to-build-ARM-CPU-plugin).
+
+The set of supported layers is defined in the [Operation set specification](https://github.com/openvinotoolkit/openvino_contrib/wiki/ARM-plugin-operation-set-specification).
+
+
+## Supported inference data types
+The Arm® CPU plugin supports the following data types as inference precision of internal primitives:
+
+- Floating-point data types:
+  - f32
+  - f16
+- Quantized data types:
+  - i8
+
+
+> **NOTE**: i8 support is experimental.
+
+[Hello Query Device C++ Sample](../../../samples/cpp/hello_query_device/README.md) can be used to print out supported data types for all detected devices.
+
+## Supported features
+
+### Preprocessing acceleration
+The Arm® CPU plugin supports the following accelerated preprocessing operations:
+- Precision conversion:
+  - u8 -> u16, s16, s32
+  - u16 -> u8, u32
+  - s16 -> u8, s32
+  - f16 -> f32
+- Transposition of tensors with dims < 5
+- Interpolation of 4D tensors with no padding (`pads_begin` and `pads_end` equal 0).
+
+The Arm® CPU plugin supports the following preprocessing operations; however, they are not accelerated:
+- Precision conversions that are not mentioned above
+- Color conversion:
+  - NV12 to RGB
+  - NV12 to BGR
+  - i420 to RGB
+  - i420 to BGR
+
+See [preprocessing API guide](../preprocessing_overview.md) for more details.
+
+## Supported properties
+The plugin supports the properties listed below. 
+
+### Read-write properties
+All parameters must be set before calling `ov::Core::compile_model()` in order to take effect, or passed as an additional argument to `ov::Core::compile_model()`.
+
+- ov::enable_profiling
+
+### Read-only properties
+- ov::supported_properties
+- ov::available_devices
+- ov::range_for_async_infer_requests
+- ov::range_for_streams
+- ov::device::full_name
+- ov::device::capabilities
+
+
+## Known Layers Limitation
+* `AvgPool` layer is supported via arm_compute library for 4D input tensors and via reference implementation in other cases.
+* `BatchToSpace` layer supports 4D tensors only and constant nodes: `block_shape` with `N` = 1 and `C` = 1, `crops_begin` with zero values and `crops_end` with zero values.
+* `ConvertLike` layer supports the same configuration as `Convert`.
+* `DepthToSpace` layer supports 4D tensors only and only the `BLOCKS_FIRST` value of the `mode` attribute.
+* `Equal` does not support `broadcast` for inputs.
+* `Gather` layer supports constant scalar or 1D indices axes only. The layer is supported via arm_compute library for non-negative indices and via reference implementation otherwise.
+* `Less` does not support `broadcast` for inputs.
+* `LessEqual` does not support `broadcast` for inputs.
+* `LRN` layer supports `axes = {1}` or `axes = {2, 3}` only.
+* `MaxPool-1` layer is supported via arm_compute library for 4D input tensors and via reference implementation in other cases.
+* `Mod` layer is supported for f32 only.
+* `MVN` layer is supported via arm_compute library for 2D inputs with a `false` value of `normalize_variance` and a `false` value of `across_channels`; in other cases the layer is implemented via runtime reference.
+* `Normalize` layer is supported via arm_compute library with the `MAX` value of `eps_mode` and `axes = {2 | 3}`; for the `ADD` value of `eps_mode` the layer uses `DecomposeNormalizeL2Add`; in other cases the layer is implemented via runtime reference. 
+* `NotEqual` does not support `broadcast` for inputs. +* `Pad` layer works with `pad_mode = {REFLECT | CONSTANT | SYMMETRIC}` parameters only. +* `Round` layer is supported via arm_compute library with `RoundMode::HALF_AWAY_FROM_ZERO` value of `mode`, for another cases layer is implemented via runtime reference. +* `SpaceToBatch` layer is supported 4D tensors only and constant nodes: `shapes`, `pads_begin` or `pads_end` with zero paddings for batch or channels and one values `shapes` for batch and channels. +* `SpaceToDepth` layer is supported 4D tensors only and for `BLOCKS_FIRST` of `mode` attribute. +* `StridedSlice` layer is supported via arm_compute library for tensors with dims < 5 and zero values of `ellipsis_mask` or zero values of `new_axis_mask` and `shrink_axis_mask`, for another cases layer is implemented via runtime reference. +* `FakeQuantize` layer is supported via arm_compute library in Low Precision evaluation mode for suitable models and via runtime reference otherwise. + +## See Also +* [How to run YOLOv4 model inference using OpenVINO™ and OpenCV on Arm®](https://opencv.org/how-to-run-yolov4-using-openvino-and-opencv-on-arm/) +* [Face recognition on Android™ using OpenVINO™ toolkit with Arm® plugin](https://opencv.org/face-recognition-on-android-using-openvino-toolkit-with-arm-plugin/) diff --git a/docs/OV_Runtime_UG/supported_plugins/AUTO.md b/docs/OV_Runtime_UG/supported_plugins/AUTO.md deleted file mode 100644 index 4d692abb3ab..00000000000 --- a/docs/OV_Runtime_UG/supported_plugins/AUTO.md +++ /dev/null @@ -1,332 +0,0 @@ -# Auto-Device Plugin {#openvino_docs_IE_DG_supported_plugins_AUTO} - -## Auto-Device Plugin Execution (C++) - -@sphinxdirective -.. raw:: html - -
C++
-@endsphinxdirective - -The AUTO device is a new, special "virtual" or "proxy" device in the OpenVINO™ toolkit. - -Use "AUTO" as the device name to delegate selection of an actual accelerator to OpenVINO. The Auto-device plugin internally recognizes and selects devices from among CPU, integrated GPU and discrete Intel GPUs (when available) depending on the device capabilities and the characteristics of CNN models (for example, precision). Then the Auto-device assigns inference requests to the selected device. - -From the application's point of view, this is just another device that handles all accelerators in the full system. - -With the 2021.4 release, Auto-device setup is done in three major steps: -1. Configure each device as usual (for example, via the conventional `SetConfig()` method) -2. Load a network to the Auto-device plugin. This is the only change needed in your application. -3. As with any other executable network resulting from `LoadNetwork()`, create as many requests as needed to saturate the devices. - -These steps are covered below in detail. - -### Defining and Configuring the Auto-Device Plugin -Following the OpenVINO convention for devices names, the Auto-device uses the label "AUTO". The only configuration option for Auto-device is a limited device list: - -| Parameter name | Parameter values | Default | Description | -| :--- | :--- | :--- |:-----------------------------------------------------------------------------| -| "MULTI_DEVICE_PRIORITIES" | comma-separated device names with no spaces| N/A | Device candidate list to be selected | - -You can use the configuration name directly as a string or use `InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES` from `multi-device/multi_device_config.hpp`, which defines the same string. - -There are two ways to use Auto-device: -1. Directly indicate device by "AUTO" or an empty string: -@snippet snippets/AUTO0.cpp part0 - -2. 
Use the Auto-device configuration: -@snippet snippets/AUTO1.cpp part1 - -Both methods allow limiting the list of device candidates for the AUTO plugin. - -> **NOTE**: The Inference Engine lets you use "GPU" as an alias for "GPU.0" in function calls. - -The Auto-device plugin supports query device optimization capabilities in metric. - -| Parameter name | Parameter values | -| :--- | :--- | -| "OPTIMIZATION_CAPABILITIES" | Auto-Device capabilities | - -### Enumerating Devices and Selection Logic - -The Inference Engine now features a dedicated API to enumerate devices and their capabilities. -See [Hello Query Device C++ Sample](../../../samples/cpp/hello_query_device/README.md). -This is the example output from the sample (truncated to device names only): - -```sh -./hello_query_device -Available devices: - Device: CPU -... - Device: GPU.0 -... - Device: GPU.1 -``` - -### Default Auto-Device Selection Logic - -With the 2021.4 release, the Auto-Device selects the most suitable device using the following default logic: - -1. Check if dGPU (discrete), iGPU (integrated) and CPU devices are available -2. Get the precision of the input model, such as FP32 -3. According to the priority of dGPU, iGPU, and CPU (in this order), if the device supports the precision of the input network, select it as the most suitable device - -For example, CPU, dGPU and iGPU can support the following precision and optimization capabilities: - -| Device | OPTIMIZATION_CAPABILITIES | -| :--- | :--- | -| CPU | WINOGRAD FP32 FP16 INT8 BIN | -| dGPU | FP32 BIN BATCHED_BLOB FP16 INT8 | -| iGPU | FP32 BIN BATCHED_BLOB FP16 INT8 | - -* When the application uses the Auto-device to run FP16 IR on a system with CPU, dGPU and iGPU, Auto-device will offload this workload to dGPU. -* When the application uses the Auto-device to run FP16 IR on a system with CPU and iGPU, Auto-device will offload this workload to iGPU. 
-* When the application uses the Auto-device to run WINOGRAD-enabled IR on a system with CPU, dGPU and iGPU, Auto-device will offload this workload to CPU. - -In cases when loading the network to dGPU or iGPU fails, CPU is the fall-back choice. - -According to the Auto-device selection logic from the previous section, tell the Inference Engine -to use the most suitable device from available devices as follows: - -@snippet snippets/AUTO2.cpp part2 - -You can also use the Auto-device plugin to choose a device from a limited choice of devices, in this example CPU and GPU: - -@snippet snippets/AUTO3.cpp part3 - -### Configuring the Individual Devices and Creating the Auto-Device on Top - -It is possible to configure each individual device as usual and create the "AUTO" device on top: - -@snippet snippets/AUTO4.cpp part4 - -Alternatively, you can combine all the individual device settings into single config file and load it, allowing the Auto-device plugin to parse and apply it to the right devices. See the code example here: - -@snippet snippets/AUTO5.cpp part5 - -### Using the Auto-Device with OpenVINO Samples and Benchmark App - -Note that every OpenVINO sample or application that supports the "-d" (which stands for "device") command-line option transparently accepts the Auto-device. The Benchmark Application is the best example of the optimal usage of the Auto-device. You do not need to set the number of requests and CPU threads, as the application provides optimal out-of-the-box performance. Below is the example command-line to evaluate AUTO performance with that: - -@sphinxdirective -.. tab:: Package, Docker, open-source installation - - .. code-block:: sh - - ./benchmark_app.py –d AUTO –m - -.. tab:: pip installation - - .. code-block:: sh - - benchmark_app –d AUTO –m - -@endsphinxdirective - - -You can also use the auto-device with limit device choice: - -@sphinxdirective -.. tab:: Package, Docker, open-source installation - - .. 
code-block:: sh - - ./benchmark_app.py –d AUTO:CPU,GPU –m - -.. tab:: pip installation - - .. code-block:: sh - - benchmark_app –d AUTO:CPU,GPU –m - -@endsphinxdirective - -**NOTES:** -* The default CPU stream is 1 if using `-d AUTO`. -* You can use the FP16 IR to work with Auto-device. -* No demos are fully optimized for Auto-device yet to select the most suitable device, -use GPU streams/throttling, and so on. - -## Auto-Device Plugin Execution (Python) - -@sphinxdirective -.. raw:: html - -
Python
-@endsphinxdirective - -The AUTO device is a new, special "virtual" or "proxy" device in the OpenVINO™ toolkit. - -Use "AUTO" as the device name to delegate selection of an actual accelerator to OpenVINO. The Auto-device plugin internally recognizes and selects devices from among CPU, integrated GPU and discrete Intel GPUs (when available) depending on the device capabilities and the characteristics of CNN models (for example, precision). Then the Auto-device assigns inference requests to the selected device. - -From the application's point of view, this is just another device that handles all accelerators in the full system. - -With the 2021.4 release, Auto-device setup is done in three major steps: - -1. Configure each device as usual (for example, via the conventional [IECore.set_config](https://docs.openvino.ai/latest/ie_python_api/classie__api_1_1IECore.html#a2c738cee90fca27146e629825c039a05) method). -2. Load a network to the Auto-device plugin. This is the only change needed in your application. -3. As with any other executable network resulting from [IECore.load_network](https://docs.openvino.ai/latest/ie_python_api/classie__api_1_1IECore.html#ac9a2e043d14ccfa9c6bbf626cfd69fcc), create as many requests as needed to saturate the devices. - -These steps are covered below in detail. - -### Defining and Configuring the Auto-Device Plugin -Following the OpenVINO convention for devices names, the Auto-device uses the label "AUTO". The only configuration option for Auto-device is a limited device list: - -| Parameter name | Parameter values | Default | Description | -| -------------- | ---------------- | ------- | ----------- | -| "AUTO_DEVICE_LIST" | comma-separated device names with no spaces | N/A | Device candidate list to be selected - -There are two ways to use the Auto-device plugin: - -1. Directly indicate device by "AUTO" or an empty string. -2. 
Use the Auto-device configuration - -Both methods allow limiting the list of device candidates for the AUTO plugin. - -```python -from openvino.inference_engine import IECore - -ie = IECore() -# Read a network in IR or ONNX format -net = ie.read_network(model=path_to_model) - -# Load a network on the "AUTO" device -exec_net = ie.load_network(network=net, device_name="AUTO") - -# Optionally specify the list of device candidates for the AUTO plugin -# The following two lines are equivalent -exec_net = ie.load_network(network=net, device_name="AUTO:CPU,GPU") -exec_net = ie.load_network(network=net, device_name="AUTO", - config={"AUTO_DEVICE_LIST": "CPU,GPU"}) -``` - -The Auto-device plugin supports query device optimization capabilities in metric. - -| Parameter name | Parameter values | -| --- | --- | -| "OPTIMIZATION_CAPABILITIES" | Auto-Device capabilities | - -### Enumerating Devices and Selection Logic - -The Inference Engine now features a dedicated API to enumerate devices and their capabilities. See the [Hello Query Device Python Sample](../../../inference_engine/ie_bridges/python/sample_hello_query_device_README.html) for code. - -This is the example output from the sample (truncated to device names only): - -```python -./hello_query_device - -Available devices: - Device: CPU -... - Device: GPU.0 -... - Device: GPU.1 -``` - -### Default Auto-Device Selection Logic - -With the 2021.4 release, the Auto-Device selects the most suitable device using the following default logic: - -1. Check if dGPU (discrete), iGPU (integrated) and CPU devices are available -2. Get the precision of the input model, such as FP32 -3. 
According to the priority of dGPU, iGPU, and CPU (in this order), if the device supports the precision of the input network, select it as the most suitable device - -For example, CPU, dGPU and iGPU can support the following precision and optimization capabilities: - -| Device | OPTIMIZATION_CAPABILITIES | -| --- | --- | -| CPU | WINOGRAD FP32 FP16 INT8 BIN | -| dGPU | FP32 BIN BATCHED_BLOB FP16 INT8 | -| iGPU | FP32 BIN BATCHED_BLOB FP16 INT8 | - -* When the application uses the Auto-device to run FP16 IR on a system with CPU, dGPU and iGPU, Auto-device will offload this workload to dGPU. -* When the application uses the Auto-device to run FP16 IR on a system with CPU and iGPU, Auto-device will offload this workload to iGPU. -* When the application uses the Auto-device to run WINOGRAD-enabled IR on a system with CPU, dGPU and iGPU, Auto-device will offload this workload to CPU. - -In cases when loading the network to dGPU or iGPU fails, CPU is the fall-back choice. - -To show the capabilities for a specific device, query the OPTIMIZATION_CAPABILITIES metric: - - -```python -from openvino.inference_engine import IECore - -ie = IECore() -ie.get_metric(device_name=device, - metric_name="OPTIMIZATION_CAPABILITIES") -``` - -### Configuring the Individual Devices and Creating the Auto-Device on Top - -It is possible to configure each individual device as usual and create the "AUTO" device on top: - -```python -from openvino.inference_engine import IECore - -ie = IECore() -net = ie.read_network(model=path_to_model) - -cpu_config = {} -gpu_config = {} - -ie.set_config(config=cpu_config, device_name="CPU") -ie.set_config(config=gpu_config, device_name="GPU") - -# Load the network to the AUTO device -exec_net = ie.load_network(network=net, device_name="AUTO") -``` - -Alternatively, you can combine all the individual device settings into single config file and load it, allowing the Auto-device plugin to parse and apply it to the right devices. 
See the code example here: - -```python -from openvino.inference_engine import IECore - -# Init the Inference Engine Core -ie = IECore() - -# Read a network in IR or ONNX format -net = ie.read_network(model=path_to_model) - -full_config = {} - -# Load the network to the AUTO device -exec_net = ie.load_network(network=net, device_name="AUTO", config=full_config) -``` - -### Using the Auto-Device with OpenVINO Samples and Benchmark App - -Note that every OpenVINO sample or application that supports the "-d" (which stands for "device") command-line option transparently accepts the Auto-device. The Benchmark Application is the best example of the optimal usage of the Auto-device. You do not need to set the number of requests and CPU threads, as the application provides optimal out-of-the-box performance. Below is the example command-line to evaluate AUTO performance with that: - -@sphinxdirective -.. tab:: Package, Docker, open-source installation - - .. code-block:: sh - - ./benchmark_app.py –d AUTO –m - -.. tab:: pip installation - - .. code-block:: sh - - benchmark_app –d AUTO –m - -@endsphinxdirective - -You can also use the auto-device with limit device choice: - -@sphinxdirective -.. tab:: Package, Docker, open-source installation - - .. code-block:: sh - - ./benchmark_app.py –d AUTO:CPU,GPU –m - -.. tab:: pip installation - - .. 
code-block:: sh - - benchmark_app –d AUTO:CPU,GPU –m - -@endsphinxdirective - -> **NOTE**: If you installed OpenVINO with pip, use `benchmark_app -d AUTO:CPU,GPU -m ` diff --git a/docs/OV_Runtime_UG/supported_plugins/AutoPlugin_Debugging.md b/docs/OV_Runtime_UG/supported_plugins/AutoPlugin_Debugging.md new file mode 100644 index 00000000000..b0c38920e55 --- /dev/null +++ b/docs/OV_Runtime_UG/supported_plugins/AutoPlugin_Debugging.md @@ -0,0 +1,136 @@ +# Debugging Auto-Device Plugin {#openvino_docs_IE_DG_supported_plugins_AUTO_debugging} + +## Using Debug Log +In case of execution problems, just like all other plugins, Auto-Device provides the user with information on exceptions and error values. If the returned data is not enough for debugging purposes, more information may be acquired by means of `ov::log::Level`. + +There are six levels of logs, which can be called explicitly or set via the `OPENVINO_LOG_LEVEL` environment variable (can be overwritten by `compile_model()` or `set_property()`): + +0 - ov::log::Level::NO +1 - ov::log::Level::ERR +2 - ov::log::Level::WARNING +3 - ov::log::Level::INFO +4 - ov::log::Level::DEBUG +5 - ov::log::Level::TRACE + +@sphinxdirective +.. tab:: C++ API + + .. code-block:: cpp + + ov::Core core; + + // read a network in IR, PaddlePaddle, or ONNX format + std::shared_ptr model = core.read_model("sample.xml"); + + // load a network to AUTO and set log level to debug + ov::CompiledModel compiled_model = core.compile_model(model, "AUTO", {{ov::log::level.name(), "LOG_DEBUG"}}); + + // or set log level with set_config and load network + core.set_property("AUTO", {{ov::log::level.name(), "LOG_DEBUG"}}); + ov::CompiledModel compiled_model2 = core.compile_model(model, "AUTO"); + +.. tab:: Python API + + .. 
code-block:: python + + from openvino.runtime import Core + core = Core() + + # read a network in IR, PaddlePaddle, or ONNX format + model = core.read_model(model_path) + + # load a network to AUTO and set log level to debug + compiled_model = core.compile_model(model=model, device_name="AUTO", config={"LOG_LEVEL":"LOG_DEBUG"}); + + // or set log level with set_config and load network + ie.SetConfig(config={"LOG_LEVEL":"LOG_DEBUG"}, device_name="AUTO"); + compiled_model = core.compile_model(model=model, device_name="AUTO"); + +.. tab:: OS environment variable + + .. code-block:: sh + + When defining it via the variable, + a number needs to be used instead of a log level name, e.g.: + + Linux + export OPENVINO_LOG_LEVEL=0 + + Windows + set OPENVINO_LOG_LEVEL=0 +@endsphinxdirective + +The property returns information in the following format: + +@sphinxdirective +.. code-block:: sh + + [time]LOG_LEVEL[file] [PLUGIN]: message +@endsphinxdirective + +in which the `LOG_LEVEL` is represented by the first letter of its name (ERROR being an exception and using its full name). For example: + +@sphinxdirective +.. code-block:: sh + + [17:09:36.6188]D[plugin.cpp:167] deviceName:MYRIAD, defaultDeviceID:, uniqueName:MYRIAD_ + [17:09:36.6242]I[executable_network.cpp:181] [AUTOPLUGIN]:select device:MYRIAD + [17:09:36.6809]ERROR[executable_network.cpp:384] [AUTOPLUGIN] load failed, MYRIAD:[ GENERAL_ERROR ] +@endsphinxdirective + + +## Instrumentation and Tracing Technology + +All major performance calls of both OpenVINO™ Runtime and the AUTO plugin are instrumented with Instrumentation and Tracing Technology (ITT) APIs. To enable ITT in OpenVINO™ Runtime, compile it with the following option: +@sphinxdirective +.. 
code-block:: sh + + -DENABLE_PROFILING_ITT=ON +@endsphinxdirective + +For more information, you can refer to: +* [OpenVINO profiling](https://docs.openvino.ai/latest/groupie_dev_profiling.html) +* [Intel® VTune™ Profiler User Guide](https://www.intel.com/content/www/us/en/develop/documentation/vtune-help/top/api-support/instrumentation-and-tracing-technology-apis.html) + +### Analyze Code Performance on Linux + +You can analyze code performance using Intel® VTune™ Profiler. For more information and installation instructions refer to the [installation guide (PDF)](https://software.intel.com/content/www/us/en/develop/download/intel-vtune-install-guide-linux-os.html) +With Intel® VTune™ Profiler installed you can configure your analysis with the following steps: + +1. Open Intel® VTune™ Profiler GUI on the host machine with the following command: +@sphinxdirective + +.. code-block:: sh + + cd /vtune install dir/intel/oneapi/vtune/2021.6.0/env + source vars.sh + vtune-gui +@endsphinxdirective + +2. select **Configure Analysis** +3. In the **where** pane, select **Local Host** +@sphinxdirective +.. image:: _static/images/IE_DG_supported_plugins_AUTO_debugging-img01-localhost.png + :align: center +@endsphinxdirective +4. In the **what** pane, specify your target application/script on the local system. +@sphinxdirective +.. image:: _static/images/IE_DG_supported_plugins_AUTO_debugging-img02-launch.png + :align: center +@endsphinxdirective +5. In the **how** pane, choose and configure the analysis type you want to perform, for example, **Hotspots Analysis**: +identify the most time-consuming functions and drill down to see time spent on each line of source code. Focus optimization efforts on hot code for the greatest performance impact. +@sphinxdirective +.. image:: _static/images/IE_DG_supported_plugins_AUTO_debugging-img03-hotspots.png + :align: center +@endsphinxdirective +6. Start the analysis by clicking the start button. 
When it is done, you will get a summary of the run, including top hotspots and top tasks in your application: +@sphinxdirective +.. image:: _static/images/IE_DG_supported_plugins_AUTO_debugging-img04-vtunesummary.png + :align: center +@endsphinxdirective +7. To analyze ITT info related to the Auto plugin, click on the **Bottom-up** tab, choose the **Task Domain/Task Type/Function/Call Stack** from the dropdown list - Auto plugin-related ITT info is under the MULTIPlugin task domain: +@sphinxdirective +.. image:: _static/images/IE_DG_supported_plugins_AUTO_debugging-img05-vtunebottomup.png + :align: center +@endsphinxdirective diff --git a/docs/OV_Runtime_UG/supported_plugins/CPU.md b/docs/OV_Runtime_UG/supported_plugins/CPU.md index 08eafe3ac0d..696ed83990c 100644 --- a/docs/OV_Runtime_UG/supported_plugins/CPU.md +++ b/docs/OV_Runtime_UG/supported_plugins/CPU.md @@ -1,139 +1,211 @@ -# CPU Plugin {#openvino_docs_IE_DG_supported_plugins_CPU} +# CPU device {#openvino_docs_OV_UG_supported_plugins_CPU} + +The CPU plugin is developed to achieve high performance inference of neural networks on Intel® x86-64 CPUs. +For an in-depth description of CPU plugin, see + +- [CPU plugin developers documentation](https://github.com/openvinotoolkit/openvino/wiki/CPUPluginDevelopersDocs) + +- [OpenVINO Runtime CPU plugin source files](https://github.com/openvinotoolkit/openvino/tree/master/src/plugins/intel_cpu/) -## Introducing the CPU Plugin -The CPU plugin was developed to achieve high performance of neural networks on CPU, using the Intel® Math Kernel Library for Deep Neural Networks (Intel® MKL-DNN). +The CPU plugin is a part of the Intel® Distribution of OpenVINO™ toolkit. -Currently, the CPU plugin uses Intel® Threading Building Blocks (Intel® TBB) in order to parallelize calculations. Please refer to the [Optimization Guide](../../optimization_guide/dldt_optimization_guide.md) for associated performance considerations. 
+## Device name +For the CPU plugin `"CPU"` device name is used, and even though there can be more than one socket on a platform, from the plugin's point of view, there is only one `"CPU"` device. +On multi-socket platforms, load balancing and memory usage distribution between NUMA nodes are handled automatically. +In order to use CPU for inference the device name should be passed to `ov::Core::compile_model()` method: -The set of supported layers can be expanded with [the Extensibility mechanism](../Extensibility_DG/Intro.md). +@snippet snippets/cpu/compile_model.cpp compile_model_default -## Supported Platforms - -OpenVINO™ toolkit, including the CPU plugin, is officially supported and validated on the following platforms: - -| Host | OS (64-bit) | -| :--- | :--- | -| Development | Ubuntu* 18.04 or 20.04, CentOS* 7.6, MS Windows* 10, macOS* 10.15 | -| Target | Ubuntu* 18.04 or 20.04, CentOS* 7.6, MS Windows* 10, macOS* 10.15 | - -The CPU plugin supports inference on Intel® Xeon® with Intel® Advanced Vector Extensions 2 (Intel® AVX2), Intel® Advanced Vector Extensions 512 (Intel® AVX-512), and AVX512_BF16, Intel® Core™ -Processors with Intel® AVX2, Intel Atom® Processors with Intel® Streaming SIMD Extensions (Intel® SSE). - -You can use the `-pc` flag for samples to know which configuration is used by a layer. -This flag shows execution statistics that you can use to get information about layer name, layer type, -execution status, execution time, and the type of the execution primitive. - -## Internal CPU Plugin Optimizations - -The CPU plugin supports several graph optimization algorithms, such as fusing or removing layers. -Refer to the sections below for details. - -> **NOTE**: For layer descriptions, see the [IR Notation Reference](../../ops/opset.md). - -### Lowering Inference Precision - -The CPU plugin follows a default optimization approach. 
This approach means that inference is made with lower precision if it is possible on a given platform to reach better performance with an acceptable range of accuracy. - -> **NOTE**: For details, see the [Using Bfloat16 Inference](../Bfloat16Inference.md). - -### Fusing Convolution and Simple Layers - -Merge of a convolution layer and any of the simple layers listed below: -- Activation: ReLU, ELU, Sigmoid, Clamp -- Depthwise: ScaleShift, PReLU -- FakeQuantize - -> **NOTE**: You can have any number and order of simple layers. - -A combination of a convolution layer and simple layers results in a single fused layer called -*Convolution*: - -![conv_simple_01] - - -### Fusing Pooling and FakeQuantize Layers - -A combination of Pooling and FakeQuantize layers results in a single fused layer called *Pooling*: - -![pooling_fakequant_01] - -### Fusing FullyConnected and Activation Layers - -A combination of FullyConnected and Activation layers results in a single fused layer called -*FullyConnected*: - -![fullyconnected_activation_01] - - -### Fusing Convolution and Depthwise Convolution Layers Grouped with Simple Layers - -> **NOTE**: This pattern is possible only on CPUs with support of Streaming SIMD Extensions 4.2 -> (SSE 4.2) and Intel AVX2 Instruction Set Architecture (ISA). - -A combination of a group of a Convolution (or Binary Convolution) layer and simple layers and a group of a Depthwise Convolution -layer and simple layers results in a single layer called *Convolution* (or *Binary Convolution*): -> **NOTE**: Depthwise convolution layers should have the same values for the `group`, input channels, and output channels parameters. 
- -![conv_depth_01] - -### Fusing Convolution and Sum Layers - -A combination of convolution, simple, and Eltwise layers with the sum operation results in a single layer called *Convolution*: - -![conv_sum_relu_01] - -### Fusing a Group of Convolutions - -If a topology contains the following pipeline, a CPU plugin merges split, convolution, and concatenation layers into a single convolution layer with the group parameter: - -![group_convolutions_01] - -> **NOTE**: Parameters of the convolution layers must coincide. - - -### Removing a Power Layer - -CPU plugin removes a Power layer from a topology if it has the following parameters: - - power = 1 - - scale = 1 - - offset = 0 +## Supported inference data types +CPU plugin supports the following data types as inference precision of internal primitives: +- Floating-point data types: + - f32 + - bf16 +- Integer data types: + - i32 +- Quantized data types: + - u8 + - i8 + - u1 -## Supported Configuration Parameters +[Hello Query Device C++ Sample](../../../samples/cpp/hello_query_device/README.md) can be used to print out supported data types for all detected devices. -The plugin supports the configuration parameters listed below. -All parameters must be set with the `InferenceEngine::Core::LoadNetwork()` method. -When specifying key values as raw strings (that is, when using Python API), omit the `KEY_` prefix. -Refer to the OpenVINO samples for usage examples: [Benchmark App](../../../samples/cpp/benchmark_app/README.md). +### Quantized data types specifics -These are general options, also supported by other plugins: +Selected precision of each primitive depends on the operation precision in IR, quantization primitives, and available hardware capabilities. +u1/u8/i8 data types are used for quantized operations only, i.e. those are not selected automatically for non-quantized operations. 
-| Parameter name | Parameter values | Default | Description | -| :--- | :--- | :--- | :----------------------------------------------------------------------------------------------------------------------------| -| KEY_EXCLUSIVE_ASYNC_REQUESTS | YES/NO | NO | Forces async requests (also from different executable networks) to execute serially. This prevents potential oversubscription| -| KEY_PERF_COUNT | YES/NO | NO | Enables gathering performance counters | +See [low-precision optimization guide](@ref pot_docs_LowPrecisionOptimizationGuide) for more details on how to get quantized model. -CPU-specific settings: +> **NOTE**: Platforms that do not support Intel® AVX512-VNNI have a known "saturation issue" which in some cases leads to reduced computational accuracy for u8/i8 precision calculations. +> See [saturation (overflow) issue section](@ref pot_saturation_issue) to get more information on how to detect such issues and possible workarounds. -| Parameter name | Parameter values | Default | Description | -| :--- | :--- | :--- | :--- | -| KEY_CPU_THREADS_NUM | positive integer values| 0 | Specifies the number of threads that CPU plugin should use for inference. Zero (default) means using all (logical) cores| -| KEY_CPU_BIND_THREAD | YES/NUMA/NO | YES | Binds inference threads to CPU cores. 'YES' (default) binding option maps threads to cores - this works best for static/synthetic scenarios like benchmarks. The 'NUMA' binding is more relaxed, binding inference threads only to NUMA nodes, leaving further scheduling to specific cores to the OS. This option might perform better in the real-life/contended scenarios. Note that for the latency-oriented cases (number of the streams is less or equal to the number of NUMA nodes, see below) both YES and NUMA options limit number of inference threads to the number of hardware cores (ignoring hyper-threading) on the multi-socket machines. 
| -| KEY_CPU_THROUGHPUT_STREAMS | KEY_CPU_THROUGHPUT_NUMA, KEY_CPU_THROUGHPUT_AUTO, or positive integer values| 1 | Specifies number of CPU "execution" streams for the throughput mode. Upper bound for the number of inference requests that can be executed simultaneously. All available CPU cores are evenly distributed between the streams. The default value is 1, which implies latency-oriented behavior for single NUMA-node machine, with all available cores processing requests one by one. On the multi-socket (multiple NUMA nodes) machine, the best latency numbers usually achieved with a number of streams matching the number of NUMA-nodes.
KEY_CPU_THROUGHPUT_NUMA creates as many streams as needed to accommodate NUMA and avoid associated penalties.
KEY_CPU_THROUGHPUT_AUTO creates bare minimum of streams to improve the performance; this is the most portable option if you don't know how many cores your target machine has (and what would be the optimal number of streams). Note that your application should provide enough parallel slack (for example, run many inference requests) to leverage the throughput mode.
Non-negative integer value creates the requested number of streams. If a number of streams is 0, no internal streams are created and user threads are interpreted as stream master threads.| -| KEY_ENFORCE_BF16 | YES/NO| YES | The name for setting to execute in bfloat16 precision whenever it is possible. This option lets plugin know to downscale the precision where it sees performance benefits from bfloat16 execution. Such option does not guarantee accuracy of the network, you need to verify the accuracy in this mode separately, based on performance and accuracy results. It should be your decision whether to use this option or not. | +### Floating point data types specifics -> **NOTE**: To disable all internal threading, use the following set of configuration parameters: `KEY_CPU_THROUGHPUT_STREAMS=0`, `KEY_CPU_THREADS_NUM=1`, `KEY_CPU_BIND_THREAD=NO`. +Default floating-point precision of a CPU primitive is f32. To support f16 IRs the plugin internally converts all the f16 values to f32 and all the calculations are performed using native f32 precision. +On platforms that natively support bfloat16 calculations (have AVX512_BF16 extension) bf16 type is automatically used instead of f32 to achieve better performance, thus no special steps are required to run a model with bf16 precision. +See the [BFLOAT16 – Hardware Numerics Definition white paper](https://software.intel.com/content/dam/develop/external/us/en/documents/bf16-hardware-numerics-definition-white-paper.pdf) for more details about bfloat16 format. + +Using bf16 precision provides the following performance benefits: + +- Faster multiplication of two bfloat16 numbers because of shorter mantissa of the bfloat16 data. +- Reduced memory consumption since bfloat16 data size is two times less than 32-bit float. 
+
+To check if the CPU device can support the bfloat16 data type, use the [query device properties interface](./config_properties.md) to query ov::device::capabilities property, which should contain `BF16` in the list of CPU capabilities:
+
+@snippet snippets/cpu/Bfloat16Inference0.cpp part0
+
+If the model was converted to bf16, ov::hint::inference_precision is set to ov::element::bf16 and can be checked via ov::CompiledModel::get_property call. The code below demonstrates how to get the element type:
+
+@snippet snippets/cpu/Bfloat16Inference1.cpp part1
+
+To infer the model in f32 precision instead of bf16 on targets with native bf16 support, set the ov::hint::inference_precision to ov::element::f32.
+
+@snippet snippets/cpu/Bfloat16Inference2.cpp part2
+
+Bfloat16 software simulation mode is available on CPUs with Intel® AVX-512 instruction set that do not support the native `avx512_bf16` instruction. This mode is used for development purposes and it does not guarantee good performance.
+To enable the simulation, one has to explicitly set ov::hint::inference_precision to ov::element::bf16.
+
+> **NOTE**: An exception is thrown in case of setting ov::hint::inference_precision to ov::element::bf16 on CPU without native bfloat16 support or bfloat16 simulation mode.
+
+> **NOTE**: Due to the reduced mantissa size of the bfloat16 data type, the resulting bf16 inference accuracy may differ from the f32 inference, especially for models that were not trained using the bfloat16 data type. If the bf16 inference accuracy is not acceptable, it is recommended to switch to the f32 precision.
+
+## Supported features
+
+### Multi-device execution
+If a machine has OpenVINO supported devices other than CPU (for example integrated GPU), then any supported model can be executed on CPU and all the other devices simultaneously.
+This can be achieved by specifying `"MULTI:CPU,GPU.0"` as a target device in case of simultaneous usage of CPU and GPU.
+
+@snippet snippets/cpu/compile_model.cpp compile_model_multi
+
+See [Multi-device execution page](../multi_device.md) for more details.
+
+### Multi-stream execution
+If either `ov::num_streams(n_streams)` with `n_streams > 1` or `ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)` property is set for CPU plugin,
+then multiple streams are created for the model. In case of CPU plugin each stream has its own host thread which means that incoming infer requests can be processed simultaneously.
+Each stream is pinned to its own group of physical cores with respect to NUMA nodes physical memory usage to minimize overhead on data transfer between NUMA nodes.
+
+See [optimization guide](@ref openvino_docs_deployment_optimization_guide_dldt_optimization_guide) for more details.
+
+> **NOTE**: When it comes to latency, one needs to keep in mind that running only one stream on a multi-socket platform may introduce additional overheads on data transfer between NUMA nodes.
+> In that case it is better to run inference on one socket (please see [Optimizing for Throughput](../../optimization_guide/dldt_deployment_optimization_tput.md) for details).
+
+### Dynamic shapes
+CPU plugin provides full functional support for models with dynamic shapes in terms of the opset coverage.
+
+> **NOTE**: CPU plugin does not support tensors with dynamically changing rank. In case of an attempt to infer a model with such tensors, an exception will be thrown.
+
+Dynamic shapes support introduces some additional overheads on memory management and may limit internal runtime optimizations.
+The more degrees of freedom we have, the more difficult it is to achieve the best performance.
+The most flexible configuration is the fully undefined shape, when we do not apply any constraints to the shape dimensions, which is the most convenient approach.
+But reducing the level of uncertainty will bring performance gains.
+We can reduce memory consumption through memory reuse, and as a result achieve better cache locality, which in turn leads to better inference performance, if we explicitly set dynamic shapes with defined upper bounds.
+
+@snippet snippets/cpu/dynamic_shape.cpp defined_upper_bound
+
+Some runtime optimizations work better if the model shapes are known in advance.
+Therefore, if the input data shape is not changed between inference calls, it is recommended to use a model with static shapes or reshape the existing model with the static input shape to get the best performance.
+
+@snippet snippets/cpu/dynamic_shape.cpp static_shape
+
+See [dynamic shapes guide](../ov_dynamic_shapes.md) for more details.
+
+### Preprocessing acceleration
+CPU plugin supports a full set of the preprocessing operations, providing high performance implementations for them.
+
+See [preprocessing API guide](../preprocessing_overview.md) for more details.
+
+@sphinxdirective
+.. dropdown:: The CPU plugin support for handling tensor precision conversion is limited to the following ov::element types:
+
+ * bf16
+ * f16
+ * f32
+ * f64
+ * i8
+ * i16
+ * i32
+ * i64
+ * u8
+ * u16
+ * u32
+ * u64
+ * boolean
+@endsphinxdirective
+
+### Models caching
+CPU plugin supports Import/Export network capability. If the model caching is enabled via common OpenVINO™ `ov::cache_dir` property, the plugin will automatically create a cached blob inside the specified directory during model compilation.
+This cached blob contains some intermediate representation of the network that it has after common runtime optimizations and low precision transformations.
+The next time the model is compiled, the cached representation will be loaded to the plugin instead of the initial IR, so the aforementioned transformation steps will be skipped.
+These transformations take a significant amount of time during model compilation, so caching this representation reduces time spent for subsequent compilations of the model,
+thereby reducing first inference latency (FIL).
+
+See [model caching overview](@ref openvino_docs_IE_DG_Model_caching_overview) for more details.
+
+### Extensibility
+CPU plugin supports fallback on `ov::Op` reference implementation if the plugin does not have its own implementation for such operation.
+That means that [OpenVINO™ Extensibility Mechanism](@ref openvino_docs_Extensibility_UG_Intro) can be used for the plugin extension as well.
+To enable fallback on a custom operation implementation, one has to override `ov::Op::evaluate` method in the derived operation class (see [custom OpenVINO™ operations](@ref openvino_docs_Extensibility_UG_add_openvino_ops) for details).
+
+> **NOTE**: At the moment, custom operations with internal dynamism (when the output tensor shape can only be determined as a result of performing the operation) are not supported by the plugin.
+
+### Stateful models
+CPU plugin supports stateful models without any limitations.
+
+See [stateful models guide](@ref openvino_docs_IE_DG_network_state_intro) for details.
+
+## Supported properties
+The plugin supports the properties listed below.
+
+### Read-write properties
+All parameters must be set before calling `ov::Core::compile_model()` in order to take effect or passed as an additional argument to `ov::Core::compile_model()`
+
+- ov::enable_profiling
+- ov::hint::inference_precision
+- ov::hint::performance_mode
+- ov::hint::num_requests
+- ov::num_streams
+- ov::affinity
+- ov::inference_num_threads
+
+
+### Read-only properties
+- ov::cache_dir
+- ov::supported_properties
+- ov::available_devices
+- ov::range_for_async_infer_requests
+- ov::range_for_streams
+- ov::device::full_name
+- ov::device::capabilities
+
+## External dependencies
+For some performance-critical DL operations, the CPU plugin uses optimized implementations from the oneAPI Deep Neural Network Library ([oneDNN](https://github.com/oneapi-src/oneDNN)).
+
+@sphinxdirective
+.. dropdown:: The following operations are implemented using primitives from the OneDNN library:
+
+ * AvgPool
+ * Concat
+ * Convolution
+ * ConvolutionBackpropData
+ * GroupConvolution
+ * GroupConvolutionBackpropData
+ * GRUCell
+ * GRUSequence
+ * LRN
+ * LSTMCell
+ * LSTMSequence
+ * MatMul
+ * MaxPool
+ * RNNCell
+ * RNNSequence
+ * SoftMax
+@endsphinxdirective

 ## See Also

 * [Supported Devices](Supported_Devices.md)
+* [Optimization guide](@ref openvino_docs_optimization_guide_dldt_optimization_guide)
+* [CPU plugin developers documentation](https://github.com/openvinotoolkit/openvino/wiki/CPUPluginDevelopersDocs)

-[mkldnn_group_conv]: ../img/mkldnn_group_conv.png
-[mkldnn_conv_sum]: ../img/mkldnn_conv_sum.png
-[mkldnn_conv_sum_result]: ../img/mkldnn_conv_sum_result.png
-[conv_simple_01]: ../img/conv_simple_01.png
-[pooling_fakequant_01]: ../img/pooling_fakequant_01.png
-[fullyconnected_activation_01]: ../img/fullyconnected_activation_01.png
-[conv_depth_01]: ../img/conv_depth_01.png
-[group_convolutions_01]: ../img/group_convolutions_01.png
-[conv_sum_relu_01]: ../img/conv_sum_relu_01.png
diff --git a/docs/OV_Runtime_UG/supported_plugins/Device_Plugins.md
b/docs/OV_Runtime_UG/supported_plugins/Device_Plugins.md index 250b69e6371..2e79c9a7bc7 100644 --- a/docs/OV_Runtime_UG/supported_plugins/Device_Plugins.md +++ b/docs/OV_Runtime_UG/supported_plugins/Device_Plugins.md @@ -1,4 +1,4 @@ -# Device Plugin Support {#openvino_docs_IE_DG_Device_Plugins} +# Working with devices {#openvino_docs_OV_UG_Working_with_devices} @sphinxdirective @@ -6,30 +6,51 @@ :maxdepth: 1 :hidden: - openvino_docs_IE_DG_InferenceEngine_QueryAPI - openvino_docs_IE_DG_supported_plugins_CPU - openvino_docs_IE_DG_supported_plugins_GPU + openvino_docs_OV_UG_query_api + openvino_docs_OV_UG_supported_plugins_CPU + openvino_docs_OV_UG_supported_plugins_GPU openvino_docs_IE_DG_supported_plugins_VPU - openvino_docs_IE_DG_supported_plugins_GNA - openvino_docs_IE_DG_supported_plugins_AUTO - openvino_docs_IE_DG_supported_plugins_HETERO - openvino_docs_IE_DG_supported_plugins_MULTI - + openvino_docs_OV_UG_supported_plugins_GNA + openvino_docs_OV_UG_supported_plugins_ARM_CPU + @endsphinxdirective -Inference Engine uses a plugin architecture. Inference Engine plugin is a software component that contains complete implementation for inference on a certain Intel® hardware device: CPU, GPU, VPU, GNA, etc. Each plugin implements the unified API and provides additional hardware-specific APIs. 
+The OpenVINO Runtime provides capabilities to infer deep learning models on the following device types with corresponding plugins: -The Inference Engine provides capabilities to infer deep learning models on the following device types with corresponding plugins: +| Plugin | Device types | +|--------|-------------------------------------------------------------------------------------------------------------------------------------------------------------| +|[CPU](CPU.md) |Intel® Xeon®, Intel® Core™ and Intel® Atom® processors with Intel® Streaming SIMD Extensions (Intel® SSE4.2), Intel® Advanced Vector Extensions 2 (Intel® AVX2), Intel® Advanced Vector Extensions 512 (Intel® AVX-512), Intel® Vector Neural Network Instructions (Intel® AVX512-VNNI) and bfloat16 extension for AVX-512 (Intel® AVX-512_BF16 Extension)| +|[GPU](GPU.md) |Intel® Graphics, including Intel® HD Graphics, Intel® UHD Graphics, Intel® Iris® Graphics, Intel® Xe Graphics, Intel® Xe MAX Graphics | +|[VPUs](VPU.md) |Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X, Intel® Vision Accelerator Design with Intel® Movidius™ VPUs | +|[GNA](GNA.md) |[Intel® Speech Enabling Developer Kit](https://www.intel.com/content/www/us/en/support/articles/000026156/boards-and-kits/smart-home.html); [Amazon Alexa\* Premium Far-Field Developer Kit](https://developer.amazon.com/en-US/alexa/alexa-voice-service/dev-kits/amazon-premium-voice); [Intel® Pentium® Silver Processors N5xxx, J5xxx and Intel® Celeron® Processors N4xxx, J4xxx (formerly codenamed Gemini Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/83915/gemini-lake.html): [Intel® Pentium® Silver J5005 Processor](https://ark.intel.com/content/www/us/en/ark/products/128984/intel-pentium-silver-j5005-processor-4m-cache-up-to-2-80-ghz.html), [Intel® Pentium® Silver N5000 Processor](https://ark.intel.com/content/www/us/en/ark/products/128990/intel-pentium-silver-n5000-processor-4m-cache-up-to-2-70-ghz.html), [Intel® Celeron® 
J4005 Processor](https://ark.intel.com/content/www/us/en/ark/products/128992/intel-celeron-j4005-processor-4m-cache-up-to-2-70-ghz.html), [Intel® Celeron® J4105 Processor](https://ark.intel.com/content/www/us/en/ark/products/128989/intel-celeron-j4105-processor-4m-cache-up-to-2-50-ghz.html), [Intel® Celeron® J4125 Processor](https://ark.intel.com/content/www/us/en/ark/products/197305/intel-celeron-processor-j4125-4m-cache-up-to-2-70-ghz.html), [Intel® Celeron® Processor N4100](https://ark.intel.com/content/www/us/en/ark/products/128983/intel-celeron-processor-n4100-4m-cache-up-to-2-40-ghz.html), [Intel® Celeron® Processor N4000](https://ark.intel.com/content/www/us/en/ark/products/128988/intel-celeron-processor-n4000-4m-cache-up-to-2-60-ghz.html); [Intel® Pentium® Processors N6xxx, J6xxx, Intel® Celeron® Processors N6xxx, J6xxx and Intel Atom® x6xxxxx (formerly codenamed Elkhart Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/128825/products-formerly-elkhart-lake.html); [Intel® Core™ Processors (formerly codenamed Cannon Lake)](https://ark.intel.com/content/www/us/en/ark/products/136863/intel-core-i3-8121u-processor-4m-cache-up-to-3-20-ghz.html); [10th Generation Intel® Core™ Processors (formerly codenamed Ice Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/74979/ice-lake.html): [Intel® Core™ i7-1065G7 Processor](https://ark.intel.com/content/www/us/en/ark/products/196597/intel-core-i71065g7-processor-8m-cache-up-to-3-90-ghz.html), [Intel® Core™ i7-1060G7 Processor](https://ark.intel.com/content/www/us/en/ark/products/197120/intel-core-i71060g7-processor-8m-cache-up-to-3-80-ghz.html), [Intel® Core™ i5-1035G4 Processor](https://ark.intel.com/content/www/us/en/ark/products/196591/intel-core-i51035g4-processor-6m-cache-up-to-3-70-ghz.html), [Intel® Core™ i5-1035G7 Processor](https://ark.intel.com/content/www/us/en/ark/products/196592/intel-core-i51035g7-processor-6m-cache-up-to-3-70-ghz.html), [Intel® Core™ i5-1035G1 
Processor](https://ark.intel.com/content/www/us/en/ark/products/196603/intel-core-i51035g1-processor-6m-cache-up-to-3-60-ghz.html), [Intel® Core™ i5-1030G7 Processor](https://ark.intel.com/content/www/us/en/ark/products/197119/intel-core-i51030g7-processor-6m-cache-up-to-3-50-ghz.html), [Intel® Core™ i5-1030G4 Processor](https://ark.intel.com/content/www/us/en/ark/products/197121/intel-core-i51030g4-processor-6m-cache-up-to-3-50-ghz.html), [Intel® Core™ i3-1005G1 Processor](https://ark.intel.com/content/www/us/en/ark/products/196588/intel-core-i31005g1-processor-4m-cache-up-to-3-40-ghz.html), [Intel® Core™ i3-1000G1 Processor](https://ark.intel.com/content/www/us/en/ark/products/197122/intel-core-i31000g1-processor-4m-cache-up-to-3-20-ghz.html), [Intel® Core™ i3-1000G4 Processor](https://ark.intel.com/content/www/us/en/ark/products/197123/intel-core-i31000g4-processor-4m-cache-up-to-3-20-ghz.html); [11th Generation Intel® Core™ Processors (formerly codenamed Tiger Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/88759/tiger-lake.html); [12th Generation Intel® Core™ Processors (formerly codenamed Alder Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/147470/products-formerly-alder-lake.html)| +|[Arm® CPU](ARM_CPU.md) |Raspberry Pi™ 4 Model B, Apple® Mac mini with M1 chip, NVIDIA® Jetson Nano™, Android™ devices | -| Plugin | Device types | +OpenVINO runtime also has several execution capabilities which work on top of other devices: + +| Capability | Description | |------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------| -|[GPU plugin](GPU.md) |Intel® Processor Graphics, including Intel® HD Graphics and Intel® Iris® Graphics | -|[CPU plugin](CPU.md) |Intel® Xeon® with Intel® Advanced Vector Extensions 2 (Intel® AVX2), Intel® Advanced Vector Extensions 512 (Intel® AVX-512), and 
AVX512_BF16, Intel® Core™ Processors with Intel® AVX2, Intel® Atom® Processors with Intel® Streaming SIMD Extensions (Intel® SSE) | -|[VPU plugins](VPU.md) (available in the Intel® Distribution of OpenVINO™ toolkit) |Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X, Intel® Vision Accelerator Design with Intel® Movidius™ VPUs | -|[GNA plugin](GNA.md) (available in the Intel® Distribution of OpenVINO™ toolkit) |Intel® Speech Enabling Developer Kit, Amazon Alexa* Premium Far-Field Developer Kit, Intel® Pentium® Silver J5005 Processor, Intel® Pentium® Silver N5000 Processor, Intel® Celeron® J4005 Processor, Intel® Celeron® J4105 Processor, Intel® Celeron® Processor N4100, Intel® Celeron® Processor N4000, Intel® Core™ i3-8121U Processor, Intel® Core™ i7-1065G7 Processor, Intel® Core™ i7-1060G7 Processor, Intel® Core™ i5-1035G4 Processor, Intel® Core™ i5-1035G7 Processor, Intel® Core™ i5-1035G1 Processor, Intel® Core™ i5-1030G7 Processor, Intel® Core™ i5-1030G4 Processor, Intel® Core™ i3-1005G1 Processor, Intel® Core™ i3-1000G1 Processor, Intel® Core™ i3-1000G4 Processor| -|[Multi-Device plugin](MULTI.md) |Multi-Device plugin enables simultaneous inference of the same network on several Intel® devices in parallel | -|[Auto-Device plugin](AUTO.md) |Auto-Device plugin enables selecting Intel® device for inference automatically | -|[Heterogeneous plugin](HETERO.md) |Heterogeneous plugin enables automatic inference splitting between several Intel® devices (for example if a device doesn't [support certain layers](#supported-layers)). 
| +|[Multi-Device execution](../multi_device.md) |Multi-Device enables simultaneous inference of the same model on several devices in parallel | +|[Auto-Device selection](../auto_device_selection.md) |Auto-Device selection enables selecting Intel® device for inference automatically | +|[Heterogeneous execution](../hetero_execution.md) |Heterogeneous execution enables automatic inference splitting between several devices (for example if a device doesn't [support certain operation](#supported-layers))| +|[Automatic Batching](../automatic_batching.md) | Auto-Batching plugin enables the batching (on top of the specified device) that is completely transparent to the application | Devices similar to the ones we have used for benchmarking can be accessed using [Intel® DevCloud for the Edge](https://devcloud.intel.com/edge/), a remote development environment with access to Intel® hardware and the latest versions of the Intel® Distribution of the OpenVINO™ Toolkit. [Learn more](https://devcloud.intel.com/edge/get_started/devcloud/) or [Register here](https://inteliot.force.com/DevcloudForEdge/s/). + +## Features support matrix +The table below demonstrates support of key features by OpenVINO device plugins. + +| Capability | [CPU](CPU.md) | [GPU](GPU.md) | [GNA](GNA.md) | [VPU](VPU.md) | [Arm® CPU](ARM_CPU.md) | +| ---------- | --- | --- | --- | --- | --- | +| [Heterogeneous execution](../hetero_execution.md)| Yes | Yes | No | ? | Yes | +| [Multi-device execution](../multi_device.md) | Yes | Yes | Partial | ? | Yes | +| [Automatic batching](../automatic_batching.md) | No | Yes | No | ? | No | +| [Multi-stream execution](@ref openvino_docs_optimization_guide_dldt_optimization_guide) | Yes | Yes | No | ? | Yes | +| [Models caching](../Model_caching_overview.md) | Yes | Partial | Yes | ? | No | +| [Dynamic shapes](../ov_dynamic_shapes.md) | Yes | Partial | No | ? | No | +| Import/Export | Yes | No | Yes | ? 
| No | +| [Preprocessing acceleration](../preprocessing_overview.md) | Yes | Yes | No | ? | Partial | +| [Stateful models](../network_state_intro.md) | Yes | No | Yes | ? | No | +| [Extensibility](@ref openvino_docs_Extensibility_UG_Intro) | Yes | Yes | No | ? | No | + +For more details on plugin specific feature limitation, see corresponding plugin pages. diff --git a/docs/OV_Runtime_UG/supported_plugins/GNA.md b/docs/OV_Runtime_UG/supported_plugins/GNA.md index 4b177d98c31..af7f7f16ee0 100644 --- a/docs/OV_Runtime_UG/supported_plugins/GNA.md +++ b/docs/OV_Runtime_UG/supported_plugins/GNA.md @@ -1,7 +1,6 @@ -# GNA Plugin {#openvino_docs_IE_DG_supported_plugins_GNA} -## Introducing the GNA Plugin +# GNA device {#openvino_docs_OV_UG_supported_plugins_GNA} -The Intel® Gaussian & Neural Accelerator is a low-power neural coprocessor for continuous inference at the edge. +The Intel® Gaussian & Neural Accelerator (GNA) is a low-power neural coprocessor for continuous inference at the edge. Intel® GNA is not intended to replace typical inference devices such as the CPU, graphics processing unit (GPU), or vision processing unit (VPU). It is designed for offloading @@ -10,371 +9,175 @@ to save power and free CPU resources. The GNA plugin provides a way to run inference on Intel® GNA, as well as in the software execution mode on CPU. 
-## Devices with Intel® GNA - -Devices with Intel® GNA support: - -* [Intel® Speech Enabling Developer Kit](https://www.intel.com/content/www/us/en/support/articles/000026156/boards-and-kits/smart-home.html) - -* [Amazon Alexa\* Premium Far-Field Developer Kit](https://developer.amazon.com/en-US/alexa/alexa-voice-service/dev-kits/amazon-premium-voice) - -* [Intel® Pentium® Silver Processors N5xxx, J5xxx and Intel® Celeron® Processors N4xxx, J4xxx (formerly codenamed Gemini Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/83915/gemini-lake.html): - - Intel® Pentium® Silver J5005 Processor - - Intel® Pentium® Silver N5000 Processor - - Intel® Celeron® J4005 Processor - - Intel® Celeron® J4105 Processor - - Intel® Celeron® J4125 Processor - - Intel® Celeron® Processor N4100 - - Intel® Celeron® Processor N4000 - -* [Intel® Pentium® Processors N6xxx, J6xxx, Intel® Celeron® Processors N6xxx, J6xxx and Intel Atom® x6xxxxx (formerly codenamed Elkhart Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/128825/products-formerly-elkhart-lake.html) - -* [Intel® Core™ Processors (formerly codenamed Cannon Lake)](https://ark.intel.com/content/www/us/en/ark/products/136863/intel-core-i3-8121u-processor-4m-cache-up-to-3-20-ghz.html) - -* [10th Generation Intel® Core™ Processors (formerly codenamed Ice Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/74979/ice-lake.html): - -* [11th Generation Intel® Core™ Processors (formerly codenamed Tiger Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/88759/tiger-lake.html). - -* [12th Generation Intel® Core™ Processors (formerly codenamed Alder Lake)](https://ark.intel.com/content/www/us/en/ark/products/codename/147470/products-formerly-alder-lake.html). - -> **NOTE**: On platforms where Intel® GNA is not enabled in the BIOS, the driver cannot be installed, so the GNA plugin uses the software emulation mode only. 
+For more details on how to configure a machine to use GNA plugin, see [GNA configuration page](@ref openvino_docs_install_guides_configurations_for_intel_gna). ## Intel® GNA Generational Differences -The first and second versions of Intel® GNA found in 10th and 11th generation Intel® Core™ Processors may be considered to be functionally equivalent. Intel® GNA 2.0 provided performance improvement with respect to Intel® GNA 1.0. Starting with 12th Generation Intel® Core™ Processors (formerly codenamed Alder Lake), support for Intel® GNA 3.0 features is being added. +The first (1.0) and second (2.0) versions of Intel® GNA found in 10th and 11th generation Intel® Core™ Processors may be considered to be functionally equivalent. Intel® GNA 2.0 provided performance improvement with respect to Intel® GNA 1.0. Starting with 12th Generation Intel® Core™ Processors (formerly codenamed Alder Lake), support for Intel® GNA 3.0 features is being added. -In the rest of this documentation, "GNA 2.0" refers to Intel® GNA hardware delivered on 10th and 11th generation Intel® Core™ processors, and the term "GNA 3.0" will be used to refer to GNA hardware delivered on 12th generation Intel® Core™ processors. +In the rest of this documentation, "GNA 2.0" refers to Intel® GNA hardware delivered on 10th and 11th generation Intel® Core™ processors, and the term "GNA 3.0" refers to GNA hardware delivered on 12th generation Intel® Core™ processors. -Initially, a limited subset of Intel® GNA 3.0 features are added to the previous feature set including the following: +### Intel® GNA Forward and Backward Compatibility -* **2D VALID Convolution With Small 2D Kernels:** Two-dimensional convolutions with the following kernel dimensions [H,W] are supported: [1,1], [2,2], [3,3], [2,1], [3,1], [4,1], [5,1], [6,1], [7,1], [1,2], or [1,3]. Input tensor dimensions are limited to [1,8,16,16] <= [N,C,H,W] <= [1,120,384,240]. 
Up to 384 channels C may be used with a subset of kernel sizes (see table below). Up to 256 kernels (output channels) are supported. Pooling is limited to pool shapes of [1,1], [2,2], or [3,3]. Not all combinations of kernel shape and input tensor shape are supported (see the tables below for exact limitations). - -The tables below show that the exact limitation on the input tensor width W depends on the number of input channels C (indicated as Ci below) and the kernel shape. There is much more freedom to choose the input tensor height and number of output channels. - -## Initially Supported Subset of Intel® GNA 2D Convolutions - -The following tables provide a more explicit representation of the Intel(R) GNA 3.0 2D convolution operations initially supported. The limits depend strongly on number of input tensor channels (Ci) and the input tensor width (W). Other factors are kernel height (KH), kernel width (KW), pool height (PH), pool width (PW), horizontal pool step (SH), and vertical pool step (PW). For example, the first table shows that for a 3x3 kernel with max pooling, only square pools are supported, and W is limited to 87 when there are 64 input channels. - -**Table of Maximum Input Tensor Widths (W) vs. Rest of Parameters** (Input and Kernel Precision: 2 bytes) - -|KH|KW|PH|PW|SH|SW|H|W
Ci=8
Co=256|W
Ci=16
Co=256|W
Ci=32
Co=256|W
Ci=64
Co=256|W
Ci=128
Co=256|W
Ci=256
Co=256|W
Ci=384
Co=256| -|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--| -|1|1|1|1|1|1|128|240|240|240|240|240|240|170| -|1|1|1|1|1|1|256|240|240|240|240|240|128|85| -|1|1|1|1|1|1|384|240|240|240|240|170|85|56| -|1|2|1|1|1|1|128|240|240|240|240| | | | -|1|2|1|1|1|1|256|240|240|240|240| | | | -|1|2|1|1|1|1|384|240|240|240|240| | | | -|1|3|1|1|1|1|128|240|240|240|240| | | | -|1|3|1|1|1|1|256|240|240|240|240| | | | -|1|3|1|1|1|1|384|240|240|240|240| | | | -|2|1|1|1|1|1|128|192|192|192|192|192|192|128| -|2|1|1|1|1|1|256|192|192|192|192|192|128|85| -|2|1|1|1|1|1|384|192|192|192|192|170|85|56| -|2|2|1|1|1|1|128|193|193|193|193| | | | -|2|2|1|1|1|1|256|193|193|193|193| | | | -|2|2|1|1|1|1|384|193|193|193|193| | | | -|2|2|2|2|1|1|128|193|193|192|179| | | | -|2|2|2|2|1|1|256|193|193|192|179| | | | -|2|2|2|2|1|1|384|193|193|192|179| | | | -|2|2|2|2|1|2|128|193|193|192|179| | | | -|2|2|2|2|1|2|256|193|193|192|179| | | | -|2|2|2|2|1|2|384|193|193|192|179| | | | -|2|2|2|2|2|1|128|193|193|192|179| | | | -|2|2|2|2|2|1|256|193|193|192|179| | | | -|2|2|2|2|2|1|384|193|193|192|179| | | | -|2|2|2|2|2|2|128|193|193|192|179| | | | -|2|2|2|2|2|2|256|193|193|192|179| | | | -|2|2|2|2|2|2|384|193|193|192|179| | | | -|3|1|1|1|1|1|128|128|128|128|128|128|85|42| -|3|1|1|1|1|1|256|128|128|128|128|128|85|42| -|3|1|1|1|1|1|384|128|128|128|128|128|85|42| -|3|3|1|1|1|1|128|130|130|130|87| | | | -|3|3|1|1|1|1|256|130|130|130|87| | | | -|3|3|1|1|1|1|384|130|130|130|87| | | | -|3|3|2|2|1|1|128|130|130|126|87| | | | -|3|3|2|2|1|1|256|130|130|126|87| | | | -|3|3|2|2|1|1|384|130|130|126|87| | | | -|3|3|2|2|1|2|128|130|130|126|87| | | | -|3|3|2|2|1|2|256|130|130|126|87| | | | -|3|3|2|2|1|2|384|130|130|126|87| | | | -|3|3|2|2|2|1|128|130|130|126|87| | | | -|3|3|2|2|2|1|256|130|130|126|87| | | | -|3|3|2|2|2|1|384|130|130|126|87| | | | -|3|3|2|2|2|2|128|130|130|126|87| | | | -|3|3|2|2|2|2|256|130|130|126|87| | | | -|3|3|2|2|2|2|384|130|130|126|87| | | | -|3|3|3|3|1|1|128|130|128|118|87| | | | 
-|3|3|3|3|1|1|256|130|128|118|87| | | | -|3|3|3|3|1|1|384|130|128|118|87| | | | -|3|3|3|3|1|2|128|130|128|118|87| | | | -|3|3|3|3|1|2|256|130|128|118|87| | | | -|3|3|3|3|1|2|384|130|128|118|87| | | | -|3|3|3|3|1|3|128|130|128|118|87| | | | -|3|3|3|3|1|3|256|130|128|118|87| | | | -|3|3|3|3|1|3|384|130|128|118|87| | | | -|3|3|3|3|2|1|128|130|128|118|87| | | | -|3|3|3|3|2|1|256|130|128|118|87| | | | -|3|3|3|3|2|1|384|130|128|118|87| | | | -|3|3|3|3|2|2|128|130|128|118|87| | | | -|3|3|3|3|2|2|256|130|128|118|87| | | | -|3|3|3|3|2|2|384|130|128|118|87| | | | -|3|3|3|3|2|3|128|130|128|118|87| | | | -|3|3|3|3|2|3|256|130|128|118|87| | | | -|3|3|3|3|2|3|384|130|128|118|87| | | | -|3|3|3|3|3|1|128|130|128|118|87| | | | -|3|3|3|3|3|1|256|130|128|118|87| | | | -|3|3|3|3|3|1|384|130|128|118|87| | | | -|3|3|3|3|3|2|128|130|128|118|87| | | | -|3|3|3|3|3|2|256|130|128|118|87| | | | -|3|3|3|3|3|2|384|130|128|118|87| | | | -|3|3|3|3|3|3|128|130|128|118|87| | | | -|3|3|3|3|3|3|256|130|128|118|87| | | | -|3|3|3|3|3|3|384|130|128|118|87| | | | -|4|1|1|1|1|1|128|96|96|96|96|96|64|32| -|4|1|1|1|1|1|256|96|96|96|96|96|64|32| -|4|1|1|1|1|1|384|96|96|96|96|96|64|32| -|5|1|1|1|1|1|128|76|76|76|76|51|25| | -|5|1|1|1|1|1|256|76|76|76|76|51|25| | -|5|1|1|1|1|1|384|76|76|76|76|51|25| | -|6|1|1|1|1|1|128|64|64|64|64|42|21| | -|6|1|1|1|1|1|256|64|64|64|64|42|21| | -|6|1|1|1|1|1|384|64|64|64|64|42|21| | -|7|1|1|1|1|1|128|54|54|54|54|36| | | -|7|1|1|1|1|1|256|54|54|54|54|36| | | -|7|1|1|1|1|1|384|54|54|54|54|36| | | - -**Table of Maximum Input Tensor Widths (W) vs. Rest of Parameters** (Input and Kernel Precision: 1 bytes) - -|KH|KW|PH|PW|SH|SW|H|W
Ci=8
Co=256|W
Ci=16
Co=256|W
Ci=32
Co=256|W
Ci=64
Co=256|W
Ci=128
Co=256|W
Ci=256
Co=256|W
Ci=384
Co=256| -|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--|:--| -|1|1|1|1|1|1|128|240|240|240|240|240|240|240| -|1|1|1|1|1|1|256|240|240|240|240|240|240|170| -|1|1|1|1|1|1|384|240|240|240|240|240|170|113| -|1|2|1|1|1|1|128|240|240|240|240|240|240|240| -|1|2|1|1|1|1|256|240|240|240|240|240|240|170| -|1|2|1|1|1|1|384|240|240|240|240|240|170|113| -|1|3|1|1|1|1|128|240|240|240|240|240| | | -|1|3|1|1|1|1|256|240|240|240|240|240| | | -|1|3|1|1|1|1|384|240|240|240|240|240| | | -|2|1|1|1|1|1|128|192|192|192|192|192|192|192| -|2|1|1|1|1|1|256|192|192|192|192|192|192|170| -|2|1|1|1|1|1|384|192|192|192|192|192|170|113| -|2|2|1|1|1|1|128|193|193|193|193|193|193|129| -|2|2|1|1|1|1|256|193|193|193|193|193|193|129| -|2|2|1|1|1|1|384|193|193|193|193|193|170|113| -|3|1|1|1|1|1|128|128|128|128|128|128|128|85| -|3|1|1|1|1|1|256|128|128|128|128|128|128|85| -|3|1|1|1|1|1|384|128|128|128|128|128|128|85| -|3|3|1|1|1|1|128|130|130|130|130|87 | | | -|3|3|1|1|1|1|256|130|130|130|130|87 | | | -|3|3|1|1|1|1|384|130|130|130|130|87 | | | -|4|1|1|1|1|1|128|96|96|96|96|96|96|64| -|4|1|1|1|1|1|256|96|96|96|96|96|96|64| -|4|1|1|1|1|1|384|96|96|96|96|96|96|64| -|5|1|1|1|1|1|128|76|76|76|76|76|51|51| -|5|1|1|1|1|1|256|76|76|76|76|76|51|51| -|5|1|1|1|1|1|384|76|76|76|76|76|51|51| -|6|1|1|1|1|1|128|64|64|64|64|64|42|21| -|6|1|1|1|1|1|256|64|64|64|64|64|42|21| -|6|1|1|1|1|1|384|64|64|64|64|64|42|21| -|7|1|1|1|1|1|128|54|54|54|54|54|36|18| -|7|1|1|1|1|1|256|54|54|54|54|54|36|18| -|7|1|1|1|1|1|384|54|54|54|54|54|36|18| - - -> **NOTE**: The above limitations only apply to the new hardware 2D convolution operation. When possible, the Intel® GNA plugin graph compiler flattens 2D convolutions so that the second generation Intel® GNA 1D convolution operations (without these limitations) may be used. The plugin will also flatten 2D convolutions regardless of the sizes if GNA 2.0 compilation target is selected (see below). 
- -## Intel® GNA Forward and Backward Compatibility - -In the general case, there is no guarantee that a model compiled for GNA 2.0 will run on GNA 3.0, or vice versa. - -However, in most cases, networks compiled for GNA 2.0 will run as expected on GNA 3.0, although the performance may be worse compared to the case when a network is compiled specifically for the latter. The exception is networks with convolutions with the number of filters greater than 8192 (see the Models and Layers Limitations section). - -Networks compiled for GNA 3.0 should run on GNA 2.0 with incompatible layers emulated on CPU. - -You can use the following options `KEY_GNA_EXEC_TARGET` and `KEY_GNA_COMPILE_TARGET` options to check interoperability (see the Supported Configuration Parameters section below): +When you run a model using the GNA plugin, it is compiled internally for the specific hardware target. It is possible to export a compiled model using Import/Export functionality to use it later, but in the general case, there is no guarantee that a model compiled and exported for GNA 2.0 runs on GNA 3.0, or vice versa. @sphinxdirective -.. tab:: C++ - ``KEY_GNA_EXEC_TARGET``, ``KEY_GNA_COMPILE_TARGET`` +.. csv-table:: Interoperability of compile target and hardware target + :header: "Hardware", "Compile target 2.0", "Compile target 3.0" -.. tab:: Python - - ``GNA_EXEC_TARGET``, ``GNA_COMPILE_TARGET`` + "GNA 2.0", "Supported", "Not supported (incompatible layers emulated on CPU)" + "GNA 3.0", "Partially supported", "Supported" @endsphinxdirective -## Drivers and Dependencies +> **NOTE**: In most cases, networks compiled for GNA 2.0 run as expected on GNA 3.0, although the performance may be worse compared to the case when a network is compiled specifically for the latter. The exception is networks with convolutions with the number of filters greater than 8192 (see the Models and Operations Limitations section). -Intel® GNA hardware requires a driver to be installed on the system. 
+For optimal work with POT quantized models which include 2D convolutions on GNA 3.0 hardware, the following requirements should be satisfied. -* Linux\* OS: -[Download Intel® GNA driver for Ubuntu Linux 18.04.3 LTS (with HWE Kernel version 5.4+)](https://storage.openvinotoolkit.org/drivers/gna/) +Choose a compile target depending on the priority: cross-platform execution, performance, memory, or power optimization. -* Windows\* OS: -Intel® GNA driver for Windows is available through Windows Update\* +Use the following properties to check interoperability in your application: `ov::intel_gna::execution_target` and `ov::intel_gna::compile_target` -## Models and Layers Limitations +[Speech C++ Sample](@ref openvino_inference_engine_samples_speech_sample_README) can be used for experiments (see `-exec_target` and `-compile_target` command line options). -Because of specifics of hardware architecture, Intel® GNA supports a limited set of layers, their kinds and combinations. -For example, you should not expect the GNA Plugin to be able to run computer vision models, except those specifically adapted for the GNA Plugin, because the plugin does not fully support 2D convolutions. +## Software emulation mode -For the list of supported layers, see the **GNA** column of the **Supported Layers** section in [Supported Devices](Supported_Devices.md). +On platforms without GNA hardware support, the plugin chooses software emulation mode by default. It means that the model runs even if you do not have GNA HW within your platform. +GNA plugin enables you to switch the execution between software emulation mode and hardware execution mode after the model is loaded. +For details, see the description of the `ov::intel_gna::execution_mode` property. -Limitations include: +## Recovery from Interruption by High-Priority Windows Audio Processes\* -- Only 1D convolutions are natively supported. -- The number of output channels for convolutions must be a multiple of 4. 
-- The maximum number of filters is 65532 for GNA 2.0 and 8192 for GNA 3.0. -- Permute layer support is limited to the cases where no data reordering is needed or when reordering is happening for two dimensions, at least one of which is not greater than 8. -- Splits and concatenations are supported for continuous portions of memory (e.g., split of 1,2,3,4 to 1,1,3,4 and 1,1,3,4 or concats of 1,2,3,4 and 1,2,3,5 to 2,2,3,4). -- For Multiply, Add and Subtract layers, auto broadcasting is only supported for constant inputs. +GNA is designed for real-time workloads such as noise reduction. +For such workloads, processing should be time constrained, otherwise extra delays may cause undesired effects such as +*audio glitches*. To make sure that processing can satisfy real-time requirements, the GNA driver provides a Quality of Service +(QoS) mechanism, which interrupts requests that might cause high-priority Windows audio processes to miss +the schedule, thereby causing long running GNA tasks to terminate early. -### Support for 2D Convolutions in Previous Generations of GNA Hardware +To prepare the applications correctly, use Automatic QoS Feature described below. -The Intel® GNA 1.0 and 2.0 hardware natively supports only 1D convolutions. +### Automatic QoS Feature on Windows* -However, 2D convolutions can be mapped to 1D when a convolution kernel moves in a single direction. GNA Plugin performs such a transformation for Kaldi `nnet1` convolution. From this perspective, the Intel® GNA hardware convolution operation accepts an `NHWC` input and produces an `NHWC` output. Because OpenVINO™ only supports the `NCHW` layout, you may need to insert `Permute` layers before or after convolutions. - -For example, the Kaldi model optimizer inserts such a permute after convolution for the [rm_cnn4a network](https://storage.openvinotoolkit.org/models_contrib/speech/2021.2/rm_cnn4a_smbr/). 
This `Permute` layer is automatically removed by the GNA Plugin, because the Intel® GNA hardware convolution layer already produces the required `NHWC` result. - -## Operation Precision - -Intel® GNA essentially operates in the low-precision mode, which represents a mix of 8-bit (`I8`), 16-bit (`I16`), and 32-bit (`I32`) integer computations. Outputs calculated using a reduced integer precision are different from the scores calculated using the floating point format, for example, `FP32` outputs calculated on CPU using the Inference Engine [CPU Plugin](CPU.md). - -Unlike other plugins supporting low-precision execution, the GNA plugin can calculate quantization factors at the model loading time, so you can run a model without calibration using the [Post-Training Optimization Tool](@ref pot_README). -However, this mode may not provide satisfactory accuracy because the internal quantization algorithm is based on heuristics which may or may not be efficient, depending on the model and dynamic range of input data. - -Starting with 2021.4 release of OpenVINO, GNA plugin users are encouraged to use the [POT API Usage sample for GNA](@ref pot_sample_speech_README) to get a model with quantization hints based on statistics for the provided dataset. - -## Execution Modes +Starting with 2021.4.1 release of OpenVINO and 03.00.00.1363 version of Windows* GNA driver, a new execution mode `ov::intel_gna::ExecutionMode::HW_WITH_SW_FBACK` is introduced +to assure that workloads satisfy real-time execution. In this mode, the GNA driver automatically falls back on CPU for a particular infer request +if the HW queue is not empty, so there is no need for explicitly switching between GNA and CPU. @sphinxdirective .. 
tab:: C++ - ============================ ============================================================================================================================================== - Mode Description - ============================ ============================================================================================================================================== - ``KEY_GNA_AUTO`` Uses Intel® GNA if available, otherwise uses software execution mode on CPU. - ``KEY_GNA_HW`` Uses Intel® GNA if available, otherwise raises an error. - ``KEY_GNA_SW`` *Deprecated*. Executes the GNA-compiled graph on CPU performing calculations in the same precision as the Intel® GNA, but not in the bit-exact mode. - ``KEY_GNA_SW_EXACT`` Executes the GNA-compiled graph on CPU performing calculations in the same precision as the Intel® GNA in the bit-exact mode. - ``KEY_GNA_HW_WITH_SW_FBACK`` Uses Intel® GNA if available, otherwise raises an error. If the hardware queue is not empty, automatically falls back to CPU in the bit-exact mode. - ``KEY_GNA_SW_FP32`` Executes the GNA-compiled graph on CPU but substitutes parameters and calculations from low precision to floating point (``FP32``). - ============================ ============================================================================================================================================== + .. doxygensnippet:: docs/snippets/gna/configure.cpp + :language: cpp + :fragment: [include] + + .. doxygensnippet:: docs/snippets/gna/configure.cpp + :language: cpp + :fragment: [ov_gna_exec_mode_hw_with_sw_fback] .. 
tab:: Python - ======================== ============================================================================================================================================== - Mode Description - ======================== ============================================================================================================================================== - ``GNA_AUTO`` Uses Intel® GNA if available, otherwise uses software execution mode on CPU. - ``GNA_HW`` Uses Intel® GNA if available, otherwise raises an error. - ``GNA_SW`` *Deprecated*. Executes the GNA-compiled graph on CPU performing calculations in the same precision as the Intel® GNA, but not in the bit-exact mode. - ``GNA_SW_EXACT`` Executes the GNA-compiled graph on CPU performing calculations in the same precision as the Intel® GNA in the bit-exact mode. - ``GNA_HW_WITH_SW_FBACK`` Uses Intel® GNA if available, otherwise raises an error. If the hardware queue is not empty, automatically falls back to CPU in the bit-exact mode. - ``GNA_SW_FP32`` Executes the GNA-compiled graph on CPU but substitutes parameters and calculations from low precision to floating point (``FP32``). - ======================== ============================================================================================================================================== + .. doxygensnippet:: docs/snippets/gna/configure.py + :language: python + :fragment: [import] + + .. doxygensnippet:: docs/snippets/gna/configure.py + :language: python + :fragment: [ov_gna_exec_mode_hw_with_sw_fback] @endsphinxdirective -## Supported Configuration Parameters +> **NOTE**: Due to the "first come - first served" nature of GNA driver and the QoS feature, this mode may lead to increased CPU consumption +if there are several clients using GNA simultaneously. 
+Even a lightweight competing infer request which has not been cleared at the time when the user's GNA client process makes its request, +can cause the user's request to be executed on CPU, thereby unnecessarily increasing CPU utilization and power. -The plugin supports the configuration parameters listed below. The parameter names correspond to their usage through API keys, such as ``GNAConfigParams::KEY_GNA_DEVICE_MODE`` or ``PluginConfigParams::KEY_PERF_COUNT`` in C++ and ``GNA_DEVICE_MODE`` or ``PERF_COUNT`` in Python. +## Supported inference data types + +Intel® GNA essentially operates in the low-precision mode which represents a mix of 8-bit (`i8`), 16-bit (`i16`), and 32-bit (`i32`) integer computations. + +GNA plugin users are encouraged to use the [Post-Training Optimization Tool](@ref pot_README) to get a model with quantization hints based on statistics for the provided dataset. + +Unlike other plugins supporting low-precision execution, the GNA plugin can calculate quantization factors at the model loading time, so you can run a model without calibration. However, this mode may not provide satisfactory accuracy because the internal quantization algorithm is based on heuristics which may or may not be efficient, depending on the model and dynamic range of input data and this mode is going to be deprecated soon. + +GNA plugin supports the following data types as inference precision of internal primitives +* Quantized data types: + - i16 + - i8 + +[Hello Query Device C++ Sample](@ref openvino_inference_engine_samples_hello_query_device_README) can be used to print out supported data types for all detected devices. + +[POT API Usage sample for GNA](@ref pot_sample_speech_README) demonstrates how a model can be quantized for GNA using POT API in 2 modes: +* Accuracy (i16 weights) +* Performance (i8 weights) + +For POT quantized model `ov::hint::inference_precision` property has no effect except cases described in Support for 2D Convolutions using POT. 
+ +## Supported features + +### Models caching +Cache for GNA plugin may be enabled via common OpenVINO `ov::cache_dir` property due to import/export functionality support (see below). + +See [Model caching overview page](@ref openvino_docs_IE_DG_Model_caching_overview) for more details. + +### Import/Export + +The GNA plugin supports import/export capability which helps to significantly decrease first inference time. The model compile target is the same as the execution target by default. The default value for the execution target corresponds to available hardware, or latest hardware version supported by the plugin (i.e., GNA 3.0) if there is no GNA HW in the system. + +If you are willing to export a model for a specific version of GNA HW, please use the `ov::intel_gna::compile_target` property and then export the model: @sphinxdirective .. tab:: C++ - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | Parameter Name | Values | Default Value | Description | - +==================================+=========================+===============+=================================================================+ - | ``KEY_GNA_EXEC_TARGET`` | ``TARGET_2_0``, | *see below* | Defines the execution target. | - | | ``TARGET_3_0`` | | | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``KEY_GNA_COMPILE_TARGET`` | ``TARGET_2_0``, | *see below* | Defines the compilation target. | - | | ``TARGET_3_0`` | | | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``KEY_GNA_COMPACT_MODE`` | ``YES``, ``NO`` | ``NO`` | Enables I/O buffers reuse to save space. | - | | | | Makes debugging harder. 
| - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``KEY_GNA_SCALE_FACTOR`` | FP32 number | 1.0 | Sets the scale factor to use for input quantization. | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``KEY_GNA_DEVICE_MODE`` | ``GNA_AUTO``, | ``GNA_AUTO`` | One of the modes described | - | | ``GNA_HW``, | | in `Execution Modes <#execution-modes>`_. | - | | ``GNA_HW_WITH_SW_FBACK``| | | - | | ``GNA_SW_EXACT``, | | | - | | ``GNA_SW_FP32`` | | | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``KEY_GNA_FIRMWARE_MODEL_IMAGE`` | ``std::string`` | ``""`` | Sets the name for the embedded model binary dump file. | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``KEY_GNA_PRECISION`` | ``I16``, ``I8`` | ``I16`` | Sets the preferred integer weight resolution for quantization | - | | | | (ignored for models produced using POT). | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``KEY_PERF_COUNT`` | ``YES``, ``NO`` | ``NO`` | Turns on performance counters reporting. | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - - The parameters are passed as ``std::map`` on ``InferenceEngine::Core::LoadNetwork`` or ``InferenceEngine::SetConfig``. - - Normally, you do not need to select the execution target (``KEY_GNA_EXEC_TARGET``) and compilation target (``KEY_GNA_COMPILE_TARGET``). 
The default value for the execution target corresponds to available hardware, or latest hardware version supported by the plugin (i.e., GNA 3.0) if there is no GNA HW in the system. The compilation target is the same as the execution target by default. However, you may want to change the targets, for example, if you want to check how a model compiled for one generation would behave on the other generation (using the software emulation mode), or if you are willing to export a model for a specific version of GNA HW. - - You can change the ``KEY_GNA_DEVICE_MODE`` parameter at run time using ``InferenceEngine::ExecutableNetwork::SetConfig``, which works for any value excluding ``GNA_SW_FP32``. This enables you to switch the execution between software emulation mode and hardware execution mode after the model is loaded. + .. doxygensnippet:: docs/snippets/gna/import_export.cpp + :language: cpp + :fragment: [ov_gna_export] .. tab:: Python - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | Parameter Name | Values | Default Value | Description | - +==================================+=========================+===============+=================================================================+ - | ``GNA_EXEC_TARGET`` | ``TARGET_2_0``, | _see below_ | Defines the execution target. | - | | ``TARGET_3_0`` | | | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``GNA_COMPILE_TARGET`` | ``TARGET_2_0``, | _see below_ | Defines the compilation target. | - | | ``TARGET_3_0`` | | | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``GNA_COMPACT_MODE`` | ``YES``, ``NO`` | ``NO`` | Enables I/O buffers reuse to save space. | - | | | | Makes debugging harder. 
| - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``GNA_SCALE_FACTOR`` | FP32 number | 1.0 | Sets the scale factor to use for input quantization. | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``KEY_GNA_DEVICE_MODE`` | ``GNA_AUTO``, | ``GNA_AUTO`` | One of the modes described | - | | ``GNA_HW``, | | in `Execution Modes <#execution-modes>`_. | - | | ``GNA_HW_WITH_SW_FBACK``| | | - | | ``GNA_SW_EXACT``, | | | - | | ``GNA_SW_FP32`` | | | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``GNA_FIRMWARE_MODEL_IMAGE`` | ``string`` | ``""`` | Sets the name for the embedded model binary dump file. | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``GNA_PRECISION`` | ``I16``, ``I8`` | ``I16`` | Sets the preferred integer weight resolution for quantization | - | | | | (ignored for models produced using POT). | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - | ``PERF_COUNT`` | ``YES``, ``NO`` | ``NO`` | Turns on performance counters reporting. | - +----------------------------------+-------------------------+---------------+-----------------------------------------------------------------+ - - The parameters are passed as strings to `IECore.load_network `_. - - Normally, you do not need to select the execution target (``GNA_EXEC_TARGET``) and compilation target (``GNA_COMPILE_TARGET``). The default value for the execution target corresponds to available hardware, or latest hardware version supported by the plugin (i.e., GNA 3.0) if there is no GNA HW in the system. 
The compilation target is the same as the execution target by default. However, you may want to change the targets, for example, if you want to check how a model compiled for one generation would behave on the other generation (using the SW emulation mode), or if you are willing to export a model for a specific version of GNA HW. - - You can change the ``GNA_DEVICE_MODE`` parameter at run time by sending a configuration dict to the `IECore.load_network `_ call, which works for any value excluding ``GNA_SW_FP32``. This enables you to switch the execution between software emulation mode and hardware execution mode after the model is loaded. + .. doxygensnippet:: docs/snippets/gna/import_export.py + :language: python + :fragment: [ov_gna_export] @endsphinxdirective -## How to Interpret Performance Counters -With the following methods, you can collect performance counters that provides various performance data about execution on GNA: +Import model: @sphinxdirective .. tab:: C++ - ``InferenceEngine::InferRequest::GetPerformanceCounts`` - - The returned map stores a counter description as a key, and a counter value in the ``realTime_uSec`` field of the ``InferenceEngineProfileInfo`` structure. - + .. doxygensnippet:: docs/snippets/gna/import_export.cpp + :language: cpp + :fragment: [ov_gna_import] .. tab:: Python - ``openvino.inference_engine.InferRequest.get_perf_counts`` + .. doxygensnippet:: docs/snippets/gna/import_export.py + :language: python + :fragment: [ov_gna_import] - The returned map stores a counter description as a key, and a counter value in the ``real_time`` field. +@endsphinxdirective + +[Compile Tool](@ref openvino_inference_engine_tools_compile_tool_README) or [Speech C++ Sample](@ref openvino_inference_engine_samples_speech_sample_README) can be used to compile model. + +### Stateful models +GNA plugin natively supports stateful models. 
+ +Please refer to [Stateful models](@ref openvino_docs_IE_DG_network_state_intro) for more details about such models. + +> **NOTE**: Typically, GNA is used in streaming scenarios, when minimizing the latency is important. Taking into account that POT does not support the `TensorIterator` operation, the recommendation is to use the `--transform` option of the Model Optimizer to apply `LowLatency2` transformation when converting an original model. + +### Profiling +The GNA plugin allows you to turn on profiling using the `ov::enable_profiling` property. +With the following methods, you can collect profiling information that provides various performance data about execution on GNA: + +@sphinxdirective +.. tab:: C++ + + ``ov::InferRequest::get_profiling_info`` + +.. tab:: Python + + ``openvino.runtime.InferRequest.get_profiling_info`` @endsphinxdirective @@ -385,109 +188,154 @@ seconds = cycles / frequency ``` Refer to the table below to learn about the frequency of Intel® GNA inside a particular processor: -Processor | Frequency of Intel® GNA ----|--- -Intel® Core™ processors| 400MHz -Intel® processors formerly codenamed Elkhart Lake | 200MHz -Intel® processors formerly codenamed Gemini Lake | 200MHz + +@sphinxdirective + +.. csv-table:: Frequency of Intel® GNA inside a particular processor + :header: "Processor", "Frequency of Intel® GNA, MHz" + + "Intel® Core™ processors", 400 + "Intel® processors formerly codenamed Elkhart Lake", 200 + "Intel® processors formerly codenamed Gemini Lake", 200 + +@endsphinxdirective Performance counters provided for the time being: -* Scoring request performance results +* Inference request performance results * Number of total cycles spent on scoring in hardware including compute and memory stall cycles * Number of stall cycles spent in hardware -## Network Batch Size +## Supported properties +The plugin supports the properties listed below. 
-Intel® GNA plugin supports the processing of context-windowed speech frames in batches of 1-8 frames in one -input blob using the following methods: +### Read-write properties +The following parameters must be set before model compilation in order to take effect or passed as an additional argument to `ov::Core::compile_model()`: + +- ov::cache_dir +- ov::enable_profiling +- ov::hint::inference_precision +- ov::hint::num_requests +- ov::intel_gna::compile_target +- ov::intel_gna::firmware_model_image_path +- ov::intel_gna::execution_target +- ov::intel_gna::pwl_design_algorithm +- ov::intel_gna::pwl_max_error_percent +- ov::intel_gna::scale_factors_per_input + +These parameters can be changed after model compilation using `ov::CompiledModel::set_property`: +- ov::hint::performance_mode +- ov::intel_gna::execution_mode +- ov::log::level + +### Read-only properties +- ov::available_devices +- ov::device::capabilities +- ov::device::full_name +- ov::intel_gna::library_full_version +- ov::optimal_number_of_infer_requests +- ov::range_for_async_infer_requests +- ov::supported_properties + +## Limitations + +### Models and Operations Limitations + +Because of specifics of hardware architecture, Intel® GNA supports a limited set of operations, their kinds and combinations. +For example, you should not expect the GNA Plugin to be able to run computer vision models, except those specifically adapted for the GNA Plugin, because the plugin does not fully support 2D convolutions. + +Limitations include: + +- Only 1D convolutions are natively supported on the HW prior to GNA 3.0; 2D convolutions have specific limitations (see the table below). +- The number of output channels for convolutions must be a multiple of 4. +- The maximum number of filters is 65532 for GNA 2.0 and 8192 for GNA 3.0. +- Transpose layer support is limited to the cases where no data reordering is needed or when reordering is happening for two dimensions, at least one of which is not greater than 8. 
+- Splits and concatenations are supported for continuous portions of memory (e.g., split of 1,2,3,4 to 1,1,3,4 and 1,1,3,4 or concats of 1,2,3,4 and 1,2,3,5 to 2,2,3,4). +- For Multiply, Add and Subtract layers, auto broadcasting is only supported for constant inputs. + +#### Support for 2D Convolutions + +The Intel® GNA 1.0 and 2.0 hardware natively supports only 1D convolutions. However, 2D convolutions can be mapped to 1D when a convolution kernel moves in a single direction. + +Initially, a limited subset of Intel® GNA 3.0 features is added to the previous feature set including the following: + +* **2D VALID Convolution With Small 2D Kernels:** Two-dimensional convolutions with the following kernel dimensions [H,W] are supported: [1,1], [2,2], [3,3], [2,1], [3,1], [4,1], [5,1], [6,1], [7,1], [1,2], or [1,3]. Input tensor dimensions are limited to [1,8,16,16] <= [N,C,H,W] <= [1,120,384,240]. Up to 384 channels C may be used with a subset of kernel sizes (see table below). Up to 256 kernels (output channels) are supported. Pooling is limited to pool shapes of [1,1], [2,2], or [3,3]. Not all combinations of kernel shape and input tensor shape are supported (see the tables below for exact limitations). + +The tables below show that the exact limitation on the input tensor width W depends on the number of input channels C (indicated as Ci below) and the kernel shape. There is much more freedom to choose the input tensor height and number of output channels. + +The following tables provide a more explicit representation of the Intel(R) GNA 3.0 2D convolution operations initially supported. The limits depend strongly on the number of input tensor channels (Ci) and the input tensor width (W). Other factors are kernel height (KH), kernel width (KW), pool height (PH), pool width (PW), horizontal pool step (SH), and vertical pool step (SW). 
For example, the first table shows that for a 3x3 kernel with max pooling, only square pools are supported, and W is limited to 87 when there are 64 input channels. + +@sphinxdirective + +:download:`Table of Maximum Input Tensor Widths (W) vs. Rest of Parameters (Input and Kernel Precision: i16) <../../../docs/OV_Runtime_UG/supported_plugins/files/GNA_Maximum_Input_Tensor_Widths_i16.csv>` + +:download:`Table of Maximum Input Tensor Widths (W) vs. Rest of Parameters (Input and Kernel Precision: i8) <../../../docs/OV_Runtime_UG/supported_plugins/files/GNA_Maximum_Input_Tensor_Widths_i8.csv>` + +@endsphinxdirective + +> **NOTE**: The above limitations only apply to the new hardware 2D convolution operation. When possible, the Intel® GNA plugin graph compiler flattens 2D convolutions so that the second generation Intel® GNA 1D convolution operations (without these limitations) may be used. The plugin will also flatten 2D convolutions regardless of the sizes if GNA 2.0 compilation target is selected (see below). + +#### Support for 2D Convolutions using POT + +For POT to successfully work with the models including GNA3.0 2D convolutions, the following requirements must be met: +* All convolution parameters are natively supported by HW (see tables above) +* The runtime precision is explicitly set by the `ov::hint::inference_precision` property as `i8` for the models produced by the `performance mode` of POT, and as `i16` for the models produced by the `accuracy mode` of POT. + +### Batch Size Limitation + +Intel® GNA plugin supports the processing of context-windowed speech frames in batches of 1-8 frames. + +Please refer to [Layout API overview](@ref openvino_docs_OV_Runtime_UG_Layout_Overview) to determine batch dimension. + +To set layout of model inputs in runtime use [Preprocessing API](@ref openvino_docs_OV_Runtime_UG_Preprocessing_Overview): @sphinxdirective .. tab:: C++ - ``InferenceEngine::ICNNNetwork::setBatchSize`` + .. 
doxygensnippet:: docs/snippets/gna/set_batch.cpp + :language: cpp + :fragment: [include] + + .. doxygensnippet:: docs/snippets/gna/set_batch.cpp + :language: cpp + :fragment: [ov_gna_set_nc_layout] .. tab:: Python - `IENetwork.batch_size `_ + .. doxygensnippet:: docs/snippets/gna/set_batch.py + :language: python + :fragment: [import] + + .. doxygensnippet:: docs/snippets/gna/set_batch.py + :language: python + :fragment: [ov_gna_set_nc_layout] @endsphinxdirective -Increasing batch size only improves efficiency of `Fully Connected` layers. - -> **NOTE**: For networks with `Convolutional`, `LSTM`, or `Memory` layers, the only supported batch size is 1. - -## Compatibility with Heterogeneous Plugin - -Heterogeneous plugin was tested with the Intel® GNA as a primary device and CPU as a secondary device. To run inference of networks with layers unsupported by the GNA plugin, such as Softmax, use the Heterogeneous plugin with the `HETERO:GNA,CPU` configuration. - -> **NOTE**: Due to limitation of the Intel® GNA backend library, heterogenous support is limited to cases where in the resulted sliced graph, only one subgraph is scheduled to run on GNA\_HW or GNA\_SW devices. - -## Recovery from Interruption by High-Priority Windows Audio Processes\* - -GNA is designed for real-time workloads such as noise reduction. -For such workloads, processing should be time constrained, otherwise extra delays may cause undesired effects such as -*audio glitches*. To make sure that processing can satisfy real-time requirements, the GNA driver provides a Quality of Service -(QoS) mechanism, which interrupts requests that might cause high-priority Windows audio processes to miss -the schedule, thereby causing long running GNA tasks to terminate early. - -Applications should be prepared for this situation. 
- -If an inference in the `GNA_HW` mode cannot be executed because of such an interruption, then the `wait` method returns the following status code: +then set batch size: @sphinxdirective .. tab:: C++ - ``InferRequest::Wait()`` returns status code ``StatusCode::INFER_NOT_STARTED``. + .. doxygensnippet:: docs/snippets/gna/set_batch.cpp + :language: cpp + :fragment: [ov_gna_set_batch_size] .. tab:: Python - `InferRequest.wait `_ returns status code `INFER_NOT_STARTED`. + .. doxygensnippet:: docs/snippets/gna/set_batch.py + :language: python + :fragment: [ov_gna_set_batch_size] @endsphinxdirective -In future releases, it will be changed to a more meaningful status code. +Increasing batch size only improves efficiency of `MatMul` layers. -Any application working with GNA must properly react to this code. -One of the strategies to adapt an application: +> **NOTE**: For models with `Convolution`, `LSTMCell`, or `ReadValue`/`Assign` operations, the only supported batch size is 1. -1. Immediately switch to the GNA_SW_EXACT emulation mode: -@sphinxdirective -.. tab:: C++ +### Compatibility with Heterogeneous mode - .. code-block:: cpp - - std::map newConfig; - newConfig[GNAConfigParams::KEY_GNA_DEVICE_MODE] = Parameter("GNA_SW_EXACT"); - executableNet.SetConfig(newConfig); - -.. tab:: Python - - .. code-block:: python - - from openvino.inference_engine import IECore - - ie = IECore() - new_cfg = {'GNA_DEVICE_MODE' : 'GNA_SW_EXACT'} - net = ie.read_network(model=path_to_model) - exec_net = ie.load_network(network=net, device_name="GNA", config=new_cfg) - -@endsphinxdirective - -2. Resubmit and switch back to GNA_HW expecting that the competing application has finished. - - > **NOTE**: This method is deprecated since a new automatic QoS mode has been introduced in 2021.4.1 release of OpenVINO™ (see below). 
- -## GNA3 Automatic QoS Feature on Windows* - -Starting with 2021.4.1 release of OpenVINO and 03.00.00.1363 version of Windows* GNA driver, a new execution mode `GNA_HW_WITH_SW_FBACK` is introduced -to assure that workloads satisfy real-time execution. In this mode, the GNA driver automatically falls back on CPU for a particular infer request -if the HW queue is not empty, so there is no need for explicitly switching between GNA and CPU. - -> **NOTE**: Due to the "first come - first served" nature of GNA driver and the QoS feature, this mode may lead to increased CPU consumption -if there are several clients using GNA simultaneously. -Even a lightweight competing infer request which has not been cleared at the time when the user's GNA client process makes its request, -can cause the user's request to be executed on CPU, thereby unnecessarily increasing CPU utilization and power. +[Heterogeneous execution](@ref openvino_docs_OV_UG_Hetero_execution) is currently not supported by GNA plugin. ## See Also diff --git a/docs/OV_Runtime_UG/supported_plugins/GPU.md b/docs/OV_Runtime_UG/supported_plugins/GPU.md index 8e55e4f647d..7099ccc307b 100644 --- a/docs/OV_Runtime_UG/supported_plugins/GPU.md +++ b/docs/OV_Runtime_UG/supported_plugins/GPU.md @@ -1,4 +1,4 @@ -# GPU Plugin {#openvino_docs_IE_DG_supported_plugins_GPU} +# GPU device {#openvino_docs_OV_UG_supported_plugins_GPU} @sphinxdirective @@ -6,21 +6,27 @@ :maxdepth: 1 :hidden: - openvino_docs_IE_DG_supported_plugins_GPU_RemoteBlob_API - + openvino_docs_OV_UG_supported_plugins_GPU_RemoteTensor_API @endsphinxdirective -The GPU plugin uses the Intel® Compute Library for Deep Neural Networks (clDNN) to infer deep neural networks. -clDNN is an open source performance library for Deep Learning (DL) applications intended for acceleration of Deep Learning Inference on Intel® Processor Graphics including Intel® HD Graphics, Intel® Iris® Graphics, Intel® Iris® Xe Graphics, and Intel® Iris® Xe MAX graphics. 
-For an in-depth description of clDNN, see [Inference Engine source files](https://github.com/openvinotoolkit/openvino/tree/master/src/plugins/intel_gpu/) and [Accelerate Deep Learning Inference with Intel® Processor Graphics](https://software.intel.com/en-us/articles/accelerating-deep-learning-inference-with-intel-processor-graphics). +The GPU plugin is an OpenCL-based plugin for inference of deep neural networks on Intel GPUs including integrated and discrete ones. +For an in-depth description of GPU plugin, see +- [GPU plugin developers documentation](https://github.com/openvinotoolkit/openvino/wiki/GPUPluginDevelopersDocs) +- [OpenVINO Runtime GPU plugin source files](https://github.com/openvinotoolkit/openvino/tree/master/src/plugins/intel_gpu/) +- [Accelerate Deep Learning Inference with Intel® Processor Graphics](https://software.intel.com/en-us/articles/accelerating-deep-learning-inference-with-intel-processor-graphics). + +The GPU plugin is a part of the Intel® Distribution of OpenVINO™ toolkit. + +See [GPU configuration page](@ref openvino_docs_install_guides_configurations_for_intel_gpu) for more details on how to configure a machine to use the GPU plugin. ## Device Naming Convention -* Devices are enumerated as "GPU.X" where `X={0, 1, 2,...}`. Only Intel® GPU devices are considered. -* If the system has an integrated GPU, it always has id=0 ("GPU.0"). +* Devices are enumerated as `"GPU.X"` where `X={0, 1, 2,...}`. Only Intel® GPU devices are considered. +* If the system has an integrated GPU, it always has id=0 (`"GPU.0"`). * Other GPUs have undefined order that depends on the GPU driver. -* "GPU" is an alias for "GPU.0" +* `"GPU"` is an alias for `"GPU.0"` * If the system doesn't have an integrated GPU, then devices are enumerated starting from 0. 
+* For GPUs with multi-tile architecture (multiple sub-devices in OpenCL terms) specific tile may be addressed as `"GPU.X.Y"` where `X,Y={0, 1, 2,...}`, `X` - id of the GPU device, `Y` - id of the tile within device `X`. For demonstration purposes, see the [Hello Query Device C++ Sample](../../../samples/cpp/hello_query_device/README.md) that can print out the list of available devices with associated indices. Below is an example output (truncated to the device names only): @@ -36,122 +42,191 @@ Available devices: Device: HDDL ``` -## Optimizations +Then device name can be passed to `ov::Core::compile_model()` method: -The plugin supports algorithms that fuse several operations into one optimized operation. Refer to the sections below for details. +@sphinxdirective -> **NOTE**: For operation descriptions, see the [IR Notation Reference](../../ops/opset.md). +.. tab:: Running on default device -### Fusing Convolution and Simple Layers + .. doxygensnippet:: docs/snippets/gpu/compile_model.cpp + :language: cpp + :fragment: [compile_model_default_gpu] -Merge of a Convolution layer and any of the simple layers listed below: -- Activation: ReLU, ELU, Sigmoid, Clamp, and others -- Depthwise: ScaleShift, PReLU -- FakeQuantize +.. tab:: Running on specific GPU -> **NOTE**: You can have any number and order of simple layers. + .. doxygensnippet:: docs/snippets/gpu/compile_model.cpp + :language: cpp + :fragment: [compile_model_gpu_with_id] -A combination of a Convolution layer and simple layers results in a single fused layer called -*Convolution*: -![conv_simple_01] +.. tab:: Running on specific tile + + .. 
doxygensnippet:: docs/snippets/gpu/compile_model.cpp + :language: cpp + :fragment: [compile_model_gpu_with_id_and_tile] + +@endsphinxdirective + +## Supported inference data types +GPU plugin supports the following data types as inference precision of internal primitives: + +- Floating-point data types: + - f32 + - f16 +- Quantized data types: + - u8 + - i8 + - u1 + +Selected precision of each primitive depends on the operation precision in IR, quantization primitives, and available hardware capabilities. +u1/u8/i8 data types are used for quantized operations only, i.e. those are not selected automatically for non-quantized operations. +See [low-precision optimization guide](@ref pot_docs_LowPrecisionOptimizationGuide) for more details on how to get quantized model. + +Floating-point precision of a GPU primitive is selected based on operation precision in IR except [compressed f16 IR form](../../MO_DG/prepare_model/FP16_Compression.md) which is executed in f16 precision. + +> **NOTE**: Hardware acceleration for i8/u8 precision may be unavailable on some platforms. In that case model is executed in floating-point precision taken from IR. Hardware support of u8/i8 acceleration can be queried via `ov::device::capabilities` property. + +[Hello Query Device C++ Sample](../../../samples/cpp/hello_query_device/README.md) can be used to print out supported data types for all detected devices. + +## Supported features + +### Multi-device execution +If a machine has multiple GPUs (for example integrated GPU and discrete Intel GPU), then any supported model can be executed on all GPUs simultaneously. +This can be achieved by specifying `"MULTI:GPU.1,GPU.0"` as a target device. + +@snippet snippets/gpu/compile_model.cpp compile_model_multi + +See [Multi-device execution page](../multi_device.md) for more details. 
+ +### Automatic batching +GPU plugin is capable of reporting `ov::max_batch_size` and `ov::optimal_batch_size` metrics with respect to the current hardware platform and model, +thus automatic batching is automatically enabled when `ov::optimal_batch_size` is > 1 and `ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)` is set. +Alternatively it can be enabled explicitly via the device notion, e.g. `"BATCH:GPU"`. + +@sphinxdirective + +.. tab:: Batching via BATCH plugin + + .. doxygensnippet:: docs/snippets/gpu/compile_model.cpp + :language: cpp + :fragment: [compile_model_batch_plugin] + +.. tab:: Batching via throughput hint + + .. doxygensnippet:: docs/snippets/gpu/compile_model.cpp + :language: cpp + :fragment: [compile_model_auto_batch] + +@endsphinxdirective + +See [Automatic batching page](../automatic_batching.md) for more details. + +### Multi-stream execution +If either `ov::num_streams(n_streams)` with `n_streams > 1` or `ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)` property is set for GPU plugin, +then multiple streams are created for the model. In case of GPU plugin each stream has its own host thread and associated OpenCL queue +which means that incoming infer requests can be processed simultaneously. + +> **NOTE**: Simultaneous scheduling of kernels to different queues doesn't mean that the kernels are actually executed in parallel on GPU device. The actual behavior depends on the hardware architecture, and in some cases the execution may be serialized inside the GPU driver. + +When multiple inferences of the same model need to be executed in parallel, multi-stream feature is preferable to multiple instances of the model or application, +since implementation of streams in GPU plugin supports weights memory sharing across streams, thus memory consumption may be lower compared to the other approaches. 
+ +See [optimization guide](@ref openvino_docs_deployment_optimization_guide_dldt_optimization_guide) for more details. + +### Dynamic shapes +GPU plugin supports dynamic shapes for batch dimension only (specified as 'N' in the [layouts terms](../layout_overview.md)) with fixed upper bound. Any other dynamic dimensions are unsupported. Internally GPU plugin creates +`log2(N)` (`N` - is an upper bound for batch dimension here) low-level execution graphs for batch sizes equal to powers of 2 to emulate dynamic behavior, so that incoming infer request with specific batch size is executed via minimal combination of internal networks. +For example, batch size 33 may be executed via 2 internal networks with batch size 32 and 1. + +> **NOTE**: Such an approach requires much more memory and overall model compilation time is significantly larger compared to the static batch scenario. + +The code snippet below demonstrates how to use dynamic batch in simple scenarios: + +@snippet snippets/gpu/dynamic_batch.cpp dynamic_batch + +See [dynamic shapes guide](../ov_dynamic_shapes.md) for more details. + +### Preprocessing acceleration +GPU plugin has the following additional preprocessing options: +- `ov::intel_gpu::memory_type::surface` and `ov::intel_gpu::memory_type::buffer` values for `ov::preprocess::InputTensorInfo::set_memory_type()` preprocessing method. These values are intended to be used to provide a hint for the plugin on the type of input Tensors that will be set in runtime to generate proper kernels. + +@snippet snippets/gpu/preprocessing.cpp init_preproc + +With such preprocessing GPU plugin will expect `ov::intel_gpu::ocl::ClImage2DTensor` (or derived) to be passed for each NV12 plane via `ov::InferRequest::set_tensor()` or `ov::InferRequest::set_tensors()` methods. + +Refer to [RemoteTensor API](./GPU_RemoteTensor_API.md) for usage examples. + +See [preprocessing API guide](../preprocessing_overview.md) for more details. 
+ +### Models caching +Cache for GPU plugin may be enabled via common OpenVINO `ov::cache_dir` property. GPU plugin implementation supports only compiled kernels caching, +thus all plugin specific model transformations are executed on each `ov::Core::compile_model()` call regardless of the `cache_dir` option, but since +the kernels compilation is a bottleneck in the model loading process, significant load time reduction can be achieved with `ov::cache_dir` property enabled. + +See [Model caching overview page](../Model_caching_overview.md) for more details. + +### Extensibility +See [GPU Extensibility](@ref openvino_docs_Extensibility_UG_GPU) page. + +### GPU context and memory sharing via RemoteTensor API +See [RemoteTensor API of GPU Plugin](GPU_RemoteTensor_API.md). -### Fusing Pooling and FakeQuantize Layers +## Supported properties +The plugin supports the properties listed below. -A combination of Pooling and FakeQuantize layers results in a single fused layer called *Pooling*: -![pooling_fakequant_01] +### Read-write properties +All parameters must be set before calling `ov::Core::compile_model()` in order to take effect or passed as additional argument to `ov::Core::compile_model()` -### Fusing Activation Layers +- ov::cache_dir +- ov::enable_profiling +- ov::hint::model_priority +- ov::hint::performance_mode +- ov::hint::num_requests +- ov::num_streams +- ov::compilation_num_threads +- ov::device::id +- ov::intel_gpu::hint::host_task_priority +- ov::intel_gpu::hint::queue_priority +- ov::intel_gpu::hint::queue_throttle +- ov::intel_gpu::enable_loop_unrolling -Given the linear pattern, an Activation layer can be fused into other layers: +### Read-only properties +- ov::supported_properties +- ov::available_devices +- ov::range_for_async_infer_requests +- ov::range_for_streams +- ov::optimal_batch_size +- ov::max_batch_size +- ov::device::full_name +- ov::device::type +- ov::device::gops +- ov::device::capabilities +- ov::intel_gpu::device_total_mem_size +- 
ov::intel_gpu::uarch_version +- ov::intel_gpu::execution_units_count +- ov::intel_gpu::memory_statistics -![fullyconnected_activation_01] +## Limitations +In some cases GPU plugin may implicitly execute several primitives on CPU using internal implementations which may lead to increased CPU utilization. +Below is the list of such operations: +- Proposal +- NonMaxSuppression +- DetectionOutput + +The behavior depends on specific parameters of the operations and hardware configuration. -### Fusing Convolution and Sum Layers +## GPU Performance Checklist: Summary +Since the OpenVINO GPU implementation relies on OpenCL™ kernels, many general OpenCL tips apply: +- Prefer `FP16` inference precision over `FP32`, as the Model Optimizer can generate both variants and the `FP32` is default. Also, consider [int8 inference](../Int8Inference.md) +- Try to group individual infer jobs by using [automatic batching](../automatic_batching.md) +- Consider [caching](../Model_caching_overview.md) to minimize model load time +- If your application is simultaneously using the inference on the CPU or otherwise loads the host heavily, make sure that the OpenCL driver threads do not starve. You can use [CPU configuration options](./CPU.md) to limit number of inference threads for the CPU plugin. +- Even in the GPU-only scenario, a GPU driver might occupy a CPU core with spin-looped polling for completion. If the _CPU_ utilization is a concern, consider the dedicated throttle hint (`ov::intel_gpu::hint::queue_throttle`) referenced in this document. Notice that this option might increase the inference latency, so consider combining with multiple GPU streams or [throughput performance hints](../performance_hints.md). +- When operating media inputs consider [remote tensors API of the GPU Plugin](./GPU_RemoteTensor_API.md). 
-A combination of Convolution, Simple, and Eltwise layers with the sum operation results in a single layer called *Convolution*: -![conv_sum_relu_01] - -### Fusing a Group of Convolutions - -If a topology contains the following pipeline, a GPU plugin merges Split, Convolution, and Concatenation layers into a single Convolution layer with the group parameter: -> **NOTE**: Parameters of the Convolution layers must coincide. - -![group_convolutions_01] - -### Optimizing Layers Out - -The following layers are optimized out under certain conditions: - * Crop - * Concatenate - * Reshape - * Flatten - * Split - * Copy - -### Load-Time Execution - -Some layers are executed during the load time, not during the inference. One of such layers is PriorBox. - - -## CPU Executed Layers - -The following layers are not accelerated on the GPU and executed on the host CPU instead: -* Proposal -* NonMaxSuppression -* PriorBox -* DetectionOutput - -## Supported Configuration Parameters - -The plugin supports the configuration parameters listed below. -All parameters must be set before calling InferenceEngine::Core::LoadNetwork() in order to take effect. -When specifying key values as raw strings (that is, when using Python API), omit the `KEY_` prefix. - -| Parameter Name | Parameter Values | Default | Description | -|---------------------|-----------------------------|-----------------|-----------------------------------------------------------| -| `KEY_CACHE_DIR` | `""` | `""` | Specifies a directory where compiled OCL binaries can be cached. First model loading generates the cache, and all subsequent LoadNetwork calls use precompiled kernels which significantly improves load time. If empty - caching is disabled | -| `KEY_PERF_COUNT` | `YES` / `NO` | `NO` | Collect performance counters during inference | -| `KEY_CONFIG_FILE` | `" [ ...]"` | `""` | Load custom layer configuration files | -| `KEY_GPU_HOST_`
`TASK_PRIORITY` | `GPU_HOST_TASK_PRIORITY_` | `GPU_HOST_TASK_PRIORITY_MEDIUM` | This key instructs the GPU plugin which cpu core type of TBB affinity used in load network.
This option has 3 types of levels: HIGH, LOW, and ANY. It is only affected on Hybrid CPUs.
- LOW - instructs the GPU Plugin to use LITTLE cores if they are available
- MEDIUM (DEFAULT) - instructs the GPU Plugin to use any available cores (BIG or LITTLE cores)
- HIGH - instructs the GPU Plugin to use BIG cores if they are available | -| `KEY_GPU_PLUGIN_`
`PRIORITY` | `<0-3>` | `0` | OpenCL queue priority (before usage, make sure your OpenCL driver supports appropriate extension)
Higher value means higher priority for OpenCL queue. 0 disables the setting. **Deprecated**. Please use KEY_GPU_MODEL_PRIORITY | -| `KEY_GPU_PLUGIN_`
`THROTTLE` | `<0-3>` | `2` | OpenCL queue throttling (before usage, make sure your OpenCL driver supports appropriate extension)
Lower value means lower driver thread priority and longer sleep time for it. Has no effect if the driver does not support reqired hint. | -| `KEY_CLDNN_ENABLE_`
`FP16_FOR_QUANTIZED_`
`MODELS` | `YES` / `NO` | `YES` | Allows using FP16+INT8 mixed precision mode, so non-quantized parts of a model will be executed in FP16 precision for FP16 IR. Does not affect quantized FP32 IRs | -| `KEY_GPU_NV12_`
`TWO_INPUTS` | `YES` / `NO` | `NO` | Controls preprocessing logic for nv12 input. If it's set to YES, then device graph will expect that user will set biplanar nv12 blob as input wich will be directly passed to device execution graph. Otherwise, preprocessing via GAPI is used to convert NV12->BGR, thus GPU graph have to expect single input | -| `KEY_GPU_THROUGHPUT_`
`STREAMS` | `KEY_GPU_THROUGHPUT_AUTO`, or positive integer| 1 | Specifies a number of GPU "execution" streams for the throughput mode (upper bound for a number of inference requests that can be executed simultaneously).
This option is can be used to decrease GPU stall time by providing more effective load from several streams. Increasing the number of streams usually is more effective for smaller topologies or smaller input sizes. Note that your application should provide enough parallel slack (e.g. running many inference requests) to leverage full GPU bandwidth. Additional streams consume several times more GPU memory, so make sure the system has enough memory available to suit parallel stream execution. Multiple streams might also put additional load on CPU. If CPU load increases, it can be regulated by setting an appropriate `KEY_GPU_PLUGIN_THROTTLE` option value (see above). If your target system has relatively weak CPU, keep throttling low.
The default value is 1, which implies latency-oriented behavior.
`KEY_GPU_THROUGHPUT_AUTO` creates bare minimum of streams to improve the performance; this is the most portable option if you are not sure how many resources your target machine has (and what would be the optimal number of streams).
A positive integer value creates the requested number of streams. | -| `KEY_EXCLUSIVE_ASYNC_`
`REQUESTS` | `YES` / `NO` | `NO` | Forces async requests (also from different executable networks) to execute serially.| -| `KEY_GPU_MAX_NUM_`
`THREADS` | `integer value` | `maximum # of HW threads available in host environment` | Specifies the number of CPU threads that can be used for GPU engine, e.g, JIT compilation of GPU kernels or cpu kernel processing within GPU plugin. The default value is set as the number of maximum available threads in host environment to minimize the time for LoadNetwork, where the GPU kernel build time occupies a large portion. Note that if the specified value is larger than the maximum available # of threads or less than zero, it is set as maximum available # of threads. It can be specified with a smaller number than the available HW threads according to the usage scenario, e.g., when the user wants to assign more CPU threads while GPU plugin is running. Note that setting this value with lower number will affect not only the network loading time but also the cpu layers of GPU networks that are optimized with multi-threading. | -| `KEY_GPU_ENABLE_`
`LOOP_UNROLLING` | `YES` / `NO` | `YES` | Enables recurrent layers such as TensorIterator or Loop with fixed iteration count to be unrolled. It is turned on by default. Turning this key on will achieve better inference performance for loops with not too many iteration counts (less than 16, as a rule of thumb). Turning this key off will achieve better performance for both graph loading time and inference time with many iteration counts (greater than 16). Note that turning this key on will increase the graph loading time in proportion to the iteration counts. Thus, this key should be turned off if graph loading time is considered to be most important target to optimize. | -| `KEY_CLDNN_PLUGIN_`
`PRIORITY` | `<0-3>` | `0` | OpenCL queue priority (before usage, make sure your OpenCL driver supports appropriate extension)
Higher value means higher priority for OpenCL queue. 0 disables the setting. **Deprecated**. Please use KEY_GPU_MODEL_PRIORITY | -| `KEY_CLDNN_PLUGIN_`
`THROTTLE` | `<0-3>` | `0` | OpenCL queue throttling (before usage, make sure your OpenCL driver supports appropriate extension)
Lower value means lower driver thread priority and longer sleep time for it. 0 disables the setting. **Deprecated**. Please use KEY_GPU_PLUGIN_THROTTLE | -| `KEY_CLDNN_GRAPH_`
`DUMPS_DIR` | `""` | `""` | clDNN graph optimizer stages dump output directory (in GraphViz format) **Deprecated**. Will be removed in the next release | -| `KEY_CLDNN_SOURCES_`
`DUMPS_DIR` | `""` | `""` | Final optimized clDNN OpenCL sources dump output directory. **Deprecated**. Will be removed in the next release | -| `KEY_DUMP_KERNELS` | `YES` / `NO` | `NO` | Dump the final kernels used for custom layers. **Deprecated**. Will be removed in the next release | -| `KEY_TUNING_MODE` | `TUNING_DISABLED`
`TUNING_CREATE`
`TUNING_USE_EXISTING` | `TUNING_DISABLED` | Disable inference kernel tuning
Create tuning file (expect much longer runtime)
Use an existing tuning file. **Deprecated**. Will be removed in the next release | -| `KEY_TUNING_FILE` | `""` | `""` | Tuning file to create / use. **Deprecated**. Will be removed in the next release | - -## Quering GPU specific metric keys -* MEMORY_STATISTICS : Returns overall memory statistics of `GPU` device allocated by engine with allocation types. If the network has `TensorIterator` or `Loop` operation which is not unrolled, there will be additional allocation at the first inference phase. In such a case, querying for `MEMORY_STATISTICS` should be done after first inference for more accurate result. The code below demonstrates how to query overall memory statistics of `GPU` device: - -@snippet snippets/GPU_Metric0.cpp part0 - -* MAX_BATCH_SIZE : Returns maximum batch size for a given network which is not only executable but also does not lose performance due to the memory swap impact. Note that the returned value may not aligned to power of 2. Also, MODEL_PTR is the required option for this metric since the available max batch size depends on the model size. If the MODEL_PTR is not given, it will return 1. The example code to set the required and optional configs for this metic is available in the following snippet: - -@snippet snippets/GPU_Metric1.cpp part1 - -* OPTIMAL_BATCH_SIZE : Returns _optimal_ batch size for a given network on the given GPU device. The returned value is aligned to power of 2. Also, MODEL_PTR is the required option for this metric since the optimal batch size highly depends on the model. If the MODEL_PTR is not given, the value of 1 is returned. 
The example code to set the required and optional configs for this metric is available in the following snippet: - -@snippet snippets/GPU_Metric1.cpp part2 -## GPU Context and Video Memory Sharing RemoteBlob API - -See [RemoteBlob API of GPU Plugin](GPU_RemoteBlob_API.md) ## See Also * [Supported Devices](Supported_Devices.md) - -[conv_simple_01]: ../img/conv_simple_01.png -[pooling_fakequant_01]: ../img/pooling_fakequant_01.png -[fullyconnected_activation_01]: ../img/fullyconnected_activation_01.png -[group_convolutions_01]: ../img/group_convolutions_01.png -[conv_sum_relu_01]: ../img/conv_sum_relu_01.png +* [Optimization guide](@ref openvino_docs_optimization_guide_dldt_optimization_guide) +* [GPU plugin developers documentation](https://github.com/openvinotoolkit/openvino/wiki/GPUPluginDevelopersDocs) diff --git a/docs/OV_Runtime_UG/supported_plugins/GPU_RemoteBlob_API.md b/docs/OV_Runtime_UG/supported_plugins/GPU_RemoteBlob_API.md deleted file mode 100644 index a5232b580d9..00000000000 --- a/docs/OV_Runtime_UG/supported_plugins/GPU_RemoteBlob_API.md +++ /dev/null @@ -1,141 +0,0 @@ -Remote Blob API of GPU Plugin {#openvino_docs_IE_DG_supported_plugins_GPU_RemoteBlob_API} -================================ - -The GPU plugin implementation of the `RemoteContext` and `RemoteBlob` interfaces supports GPU -pipeline developers who need video memory sharing and interoperability with existing native APIs -such as OpenCL\*, Microsoft DirectX\*, or VAAPI\*. -Using these interfaces allows you to avoid any memory copy overhead when plugging the OpenVINO™ inference -into an existing GPU pipeline. It also enables OpenCL kernels participating in the pipeline to become -native buffer consumers or producers of the OpenVINO™ inference. -Since the GPU plugin works on top of the clDNN library, the functionality above is also implemented -using OpenCL and its sharing extensions provided by Intel®. 
- -There are two interoperability scenarios supported by the Remote Blob API: - -* GPU plugin context and memory objects can be constructed from low-level device, display, or memory -handles and used to create the OpenVINO™ `ExecutableNetwork` or `Blob` class. -* OpenCL context or buffer handles can be obtained from existing GPU plugin objects, and used in OpenCL processing. - -Class and function declarations for the API are defined in the following files: -* Windows\*: `gpu/gpu_context_api_ocl.hpp` and `gpu/gpu_context_api_dx.hpp` -* Linux\*: `gpu/gpu_context_api_ocl.hpp` and `gpu/gpu_context_api_va.hpp` - -The most common way to enable the interaction of your application with the Remote Blob API is to use user-side utility classes -and functions that consume or produce native handles directly. - -## Execution Context User-Side Wrappers - -GPU plugin classes that implement the `RemoteContext` interface are responsible for context sharing. -Obtaining a pointer to a context object is the first step of sharing pipeline objects. -The context object of the GPU plugin directly wraps OpenCL context, setting a scope for sharing -`ExecutableNetwork` and `RemoteBlob` objects. -To create such objects within user context, explicitly provide the context to the plugin using the -`make_shared_context()` overloaded function. Depending on the platform, the function accepts the -`cl_context` handle, the pointer to the `ID3D11Device` interface, or the `VADisplay` handle, and -returns a smart pointer to the `RemoteContext` plugin object. - -If you do not provide any user context, the plugin uses its default internal context. -The plugin attempts to use the same internal context object as long as plugin options are kept the same. -Therefore, all ExecutableNetwork objects created during this time share the same context. -Once the plugin options are changed, the internal context is replaced by the new one. 
- -To request the current default context of the plugin, call the `GetDefaultContext()` method of the core engine. -To request the internal context of the given `ExecutableNetwork`, use the `GetContext()` method. - -## Shared Blob User-Side Wrappers - -The classes that implement the `RemoteBlob` interface are both wrappers for native API -memory handles (which can be obtained from them at any time) and act just like regular OpenVINO™ -`Blob` objects. - -Once you obtain the context, you can use it to compile a new `ExecutableNetwork` or create `RemoteBlob` -objects. -For network compilation, use a dedicated flavor of `LoadNetwork()`, which accepts the context as an -additional parameter. - -To create a shared blob from a native memory handle, use `make_shared_blob()` overloaded functions -that can accept the `cl::Buffer`, `cl::Image2D`, `cl_mem` handles, and either `ID3D11Buffer`, -`ID3D11Texture2D` pointers or the `VASurfaceID` handle. -All `make_shared_blob()` flavors return a smart pointer to the `Blob` object, which can be directly -passed to the `SetBlob() `method of an inference request object. - -## Direct NV12 video surface input - -To support the direct consumption of a hardware video decoder output, plugin accepts two-plane video -surfaces as arguments for the `make_shared_blob_nv12()` function, which creates an `NV12Blob` object -and returns a smart pointer to it, which is cast to `Blob::Ptr`. - -To ensure that the plugin generates the correct execution graph for the NV12 dual-plane input, set -the `CLDNNConfigParams::KEY_CLDNN_NV12_TWO_INPUTS` plugin configuration flag to `PluginConfigParams::YES`. - -## Context & queue sharing - -GPU plugin supports creation of shared context from `cl_command_queue` handle. In that case -opencl context handle is extracted from given queue via OpenCL™ API, and the queue itself is used inside -the plugin for further execution of inference primitives. 
Sharing of the queue changes behavior of `StartAsync()` -method to guarantee that submission of inference primitives into given queue is finished before -returning of control back to calling thread. - -This sharing mechanism allows to do pipeline synchronization on app side and avoid blocking of host thread -on waiting for completion of inference. Pseudocode may look as follows: - -@snippet snippets/GPU_RemoteBlob_API3.cpp part0 - -### Limitations - - - Some primitives in GPU plugin may block host thread on waiting for previous primitives before adding its kernels - to the command queue. In such cases `StartAsync()` call takes much more time to return control to the calling thread - as internally it waits for partial or full network completion. - Examples of operations: Loop, TensorIterator, DetectionOutput, NonMaxSuppression - - Synchronization of pre/post processing jobs and inference pipeline inside shared queue is the user responsibility - - Throughput mode is not available when queue sharing is used, i.e. only single stream can be used for each executable network. - -## Low-Level Methods and Their Parameter Description - -The high-level wrappers above bring a direct dependency on native APIs to the user program. -If you want to avoid the dependency, you still can directly use the `CreateContext()`, -`CreateBlob()`, and `getParams()` methods. -On this level, native handles are re-interpreted as void pointers and all arguments are passed -using `std::map` containers that are filled with `std::string, InferenceEngine::Parameter` pairs. -Two types of map entries are possible: descriptor and container. The first map entry is a -descriptor, which sets the expected structure and possible parameter values of the map. - -**Parameter Map Entries** - -| Key Name | Description and Possible Parameter Values | -|----------------|---------------------------------------------------------------------| -| `CONTEXT_TYPE` | Describes the type of the shared context in a map. 
Can be `OCL` (for pure OpenCL context) or `VA_SHARED` (for context shared with a video decoding device). | -| `OCL_CONTEXT` | Contains the OpenCL context handle. | -| `OCL_QUEUE` | Contains the OpenCL queue handle if queue sharing is needed. | -| `VA_DEVICE` | Contains the native video decoding device handle. Can be `VADisplay` or `ID3D11Device` (a pointer). | -| `SHARED_MEM_TYPE` | Describes the type of the shared memory buffer in a map. Can be `OCL_BUFFER` (clBuffer), `OCL_IMAGE2D` (clImage2D), `VA_SURFACE()`, or `DX_BUFFER`. | -| `MEM_HANDLE` | Contains the OpenCL memory handle. | -| `DEV_OBJECT_HANDLE` | Contains the native video decoder surface handle. | -| `VA_PLANE` | Contains the NV12 video decoder surface plane index. Can be `0` or `1`. | - -> **NOTE**: To initialize the entry key and value, use the `GPU_PARAM_KEY()` or `GPU_PARAM_VALUE()` macro. - -## Examples - -Refer to the sections below to see pseudo-code of usage examples. - -> **NOTE**: For low-level parameter usage examples, see the source code of user-side wrappers from the include files mentioned above. - -### OpenCL Kernel Execution on a Shared Buffer - -This example uses the OpenCL context obtained from an executable network object. 
- -@snippet snippets/GPU_RemoteBlob_API0.cpp part0 - -### Running GPU Plugin Inference within User-Supplied Shared Context - -@snippet snippets/GPU_RemoteBlob_API1.cpp part1 - -### Direct Consuming of the NV12 VAAPI Video Decoder Surface on Linux - -@snippet snippets/GPU_RemoteBlob_API2.cpp part2 - -## See Also - -* InferenceEngine::Core -* InferenceEngine::RemoteBlob diff --git a/docs/OV_Runtime_UG/supported_plugins/GPU_RemoteTensor_API.md b/docs/OV_Runtime_UG/supported_plugins/GPU_RemoteTensor_API.md new file mode 100644 index 00000000000..638f4735293 --- /dev/null +++ b/docs/OV_Runtime_UG/supported_plugins/GPU_RemoteTensor_API.md @@ -0,0 +1,324 @@ +Remote Tensor API of GPU Plugin {#openvino_docs_OV_UG_supported_plugins_GPU_RemoteTensor_API} +================================ + +The GPU plugin implementation of the `ov::RemoteContext` and `ov::RemoteTensor` interfaces supports GPU +pipeline developers who need video memory sharing and interoperability with existing native APIs +such as OpenCL\*, Microsoft DirectX\*, or VAAPI\*. +Using these interfaces allows you to avoid any memory copy overhead when plugging the OpenVINO™ inference +into an existing GPU pipeline. It also enables OpenCL kernels participating in the pipeline to become +native buffer consumers or producers of the OpenVINO™ inference. + +There are two interoperability scenarios supported by the Remote Tensor API: + +* GPU plugin context and memory objects can be constructed from low-level device, display, or memory +handles and used to create the OpenVINO™ `ov::CompiledModel` or `ov::Tensor` objects. +* OpenCL context or buffer handles can be obtained from existing GPU plugin objects, and used in OpenCL processing on the application side. 
+ +Class and function declarations for the API are defined in the following files: +* Windows\*: `openvino/runtime/intel_gpu/ocl/ocl.hpp` and `openvino/runtime/intel_gpu/ocl/dx.hpp` +* Linux\*: `openvino/runtime/intel_gpu/ocl/ocl.hpp` and `openvino/runtime/intel_gpu/ocl/va.hpp` + +The most common way to enable the interaction of your application with the Remote Tensor API is to use user-side utility classes +and functions that consume or produce native handles directly. + +## Context sharing between application and GPU plugin + +GPU plugin classes that implement the `ov::RemoteContext` interface are responsible for context sharing. +Obtaining a context object is the first step of sharing pipeline objects. +The context object of the GPU plugin directly wraps OpenCL context, setting a scope for sharing +`ov::CompiledModel` and `ov::RemoteTensor` objects. `ov::RemoteContext` object can be either created on top of an +existing handle from a native API or retrieved from GPU plugin. + +Once you obtain the context, you can use it to compile a new `ov::CompiledModel` or create `ov::RemoteTensor` +objects. +For network compilation, use a dedicated flavor of `ov::Core::compile_model()`, which accepts the context as an +additional parameter. + +### Creation of RemoteContext from native handle +To create `ov::RemoteContext` object for user context, explicitly provide the context to the plugin using constructor for one +of `ov::RemoteContext` derived classes. + +@sphinxdirective + +.. tab:: Linux + + .. tab:: Create from cl_context + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [context_from_cl_context] + + .. tab:: Create from cl_queue + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [context_from_cl_queue] + + .. tab:: Create from VADisplay + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [context_from_va_display] + +.. 
tab:: Windows + + .. tab:: Create from cl_context + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [context_from_cl_context] + + .. tab:: Create from cl_queue + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [context_from_cl_queue] + + .. tab:: Create from ID3D11Device + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [context_from_d3d_device] + +@endsphinxdirective + + +### Getting RemoteContext from the plugin +If you do not provide any user context, the plugin uses its default internal context. +The plugin attempts to use the same internal context object as long as plugin options are kept the same. +Therefore, all `ov::CompiledModel` objects created during this time share the same context. +Once the plugin options are changed, the internal context is replaced by the new one. + +To request the current default context of the plugin use one of the following methods: + +@sphinxdirective + +.. tab:: Get context from Core + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [default_context_from_core] + +.. tab:: Get context from CompiledModel + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [default_context_from_model] + + +@endsphinxdirective + +## Memory sharing between application and GPU plugin + +The classes that implement the `ov::RemoteTensor` interface are the wrappers for native API +memory handles (which can be obtained from them at any time). + +To create a shared tensor from a native memory handle, use dedicated `create_tensor`or `create_tensor_nv12` methods +of the `ov::RemoteContext` sub-classes. 
+`ov::intel_gpu::ocl::ClContext` has multiple overloads of `create_tensor` methods which allow to wrap pre-allocated native handles with `ov::RemoteTensor` +object or request plugin to allocate specific device memory. See code snippets below for more details. + +@sphinxdirective + +.. tab:: Wrap native handles + + .. tab:: USM pointer + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [wrap_usm_pointer] + + .. tab:: cl_mem + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [wrap_cl_mem] + + .. tab:: cl::Buffer + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [wrap_cl_buffer] + + .. tab:: cl::Image2D + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [wrap_cl_image] + + .. tab:: biplanar NV12 surface + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [wrap_nv12_surface] + +.. tab:: Allocate device memory + + .. tab:: USM host memory + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [allocate_usm_host] + + .. tab:: USM device memory + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [allocate_usm_device] + + .. tab:: cl::Buffer + + .. doxygensnippet:: docs/snippets/gpu/remote_objects_creation.cpp + :language: cpp + :fragment: [allocate_cl_buffer] + +@endsphinxdirective + +`ov::intel_gpu::ocl::D3DContext` and `ov::intel_gpu::ocl::VAContext` classes are derived from `ov::intel_gpu::ocl::ClContext`, +thus they provide functionality described above and extends it +to allow creation of `ov::RemoteTensor` objects from `ID3D11Buffer`, `ID3D11Texture2D` pointers or the `VASurfaceID` handle respectively. 
+ +## Direct NV12 video surface input + +To support the direct consumption of a hardware video decoder output, plugin accepts two-plane video +surfaces as arguments for the `create_tensor_nv12()` function, which creates a pair of `ov::RemoteTensor` +objects which represent the Y and UV planes. + +To ensure that the plugin generates the correct execution graph for the NV12 dual-plane input, static preprocessing +should be added before model compilation: + +@snippet snippets/gpu/preprocessing.cpp init_preproc + +Since `ov::intel_gpu::ocl::ClImage2DTensor` (and derived classes) doesn't support batched surfaces, in cases when batching and surface sharing are required +at the same time, the user needs to set inputs via `ov::InferRequest::set_tensors` method with vector of shared surfaces for each plane: + +@sphinxdirective + +.. tab:: Single batch + + .. doxygensnippet:: docs/snippets/gpu/preprocessing.cpp + :language: cpp + :fragment: [single_batch] + +.. tab:: Multiple batches + + .. doxygensnippet:: docs/snippets/gpu/preprocessing.cpp + :language: cpp + :fragment: [batched_case] + + +@endsphinxdirective + +I420 color format can be processed in a similar way. + +## Context & queue sharing + +GPU plugin supports creation of shared context from `cl_command_queue` handle. In that case +opencl context handle is extracted from given queue via OpenCL™ API, and the queue itself is used inside +the plugin for further execution of inference primitives. Sharing of the queue changes behavior of `ov::InferRequest::start_async()` +method to guarantee that submission of inference primitives into given queue is finished before +returning of control back to calling thread. + +This sharing mechanism allows to do pipeline synchronization on app side and avoid blocking of host thread +on waiting for completion of inference. Pseudocode may look as follows: + +@sphinxdirective +.. raw:: html + +
+ +@endsphinxdirective + +@snippet snippets/gpu/queue_sharing.cpp queue_sharing + +@sphinxdirective +.. raw:: html + +
+ +@endsphinxdirective + +### Limitations + + - Some primitives in GPU plugin may block host thread on waiting for previous primitives before adding its kernels + to the command queue. In such cases `ov::InferRequest::start_async()` call takes much more time to return control to the calling thread + as internally it waits for partial or full network completion. + Examples of operations: Loop, TensorIterator, DetectionOutput, NonMaxSuppression + - Synchronization of pre/post processing jobs and inference pipeline inside shared queue is the user responsibility + - Throughput mode is not available when queue sharing is used, i.e. only single stream can be used for each compiled model. + +## Low-Level Methods for RemoteContext and RemoteTensor creation + +The high-level wrappers above bring a direct dependency on native APIs to the user program. +If you want to avoid the dependency, you still can directly use the `ov::Core::create_context()`, +`ov::RemoteContext::create_tensor()`, and `ov::RemoteContext::get_params()` methods. +On this level, native handles are re-interpreted as void pointers and all arguments are passed +using `ov::AnyMap` containers that are filled with `std::string, ov::Any` pairs. +Two types of map entries are possible: descriptor and container. The first map entry is a +descriptor, which sets the expected structure and possible parameter values of the map. + +Refer to `openvino/runtime/intel_gpu/remote_properties.hpp` header file for possible low-level properties and their description. + +## Examples + +Refer to the sections below to see pseudo-code of usage examples. + +> **NOTE**: For low-level parameter usage examples, see the source code of user-side wrappers from the include files mentioned above. + + +@sphinxdirective +.. raw:: html + +
+ +@endsphinxdirective + +This example uses the OpenCL context obtained from a compiled model object. + +@snippet snippets/gpu/context_sharing.cpp context_sharing_get_from_ov + +@sphinxdirective +.. raw:: html + +
+ +@endsphinxdirective + + +@sphinxdirective +.. raw:: html + +
+ +@endsphinxdirective + +@snippet snippets/gpu/context_sharing.cpp context_sharing_user_handle + +@sphinxdirective +.. raw:: html + +
+ +@endsphinxdirective + + +@sphinxdirective +.. raw:: html + +
+ +@endsphinxdirective + +@snippet snippets/gpu/context_sharing_va.cpp context_sharing_va + +@sphinxdirective +.. raw:: html + +
+ +@endsphinxdirective + +## See Also + +* ov::Core +* ov::RemoteTensor diff --git a/docs/OV_Runtime_UG/supported_plugins/HDDL.md b/docs/OV_Runtime_UG/supported_plugins/HDDL.md index 6d7784aa9ad..bb05719a223 100644 --- a/docs/OV_Runtime_UG/supported_plugins/HDDL.md +++ b/docs/OV_Runtime_UG/supported_plugins/HDDL.md @@ -1,13 +1,17 @@ -# HDDL Plugin {#openvino_docs_IE_DG_supported_plugins_HDDL} +# HDDL device {#openvino_docs_OV_UG_supported_plugins_HDDL} ## Introducing the HDDL Plugin -The Inference Engine HDDL plugin was developed for inference with neural networks on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. It is designed for use cases that require large throughput for deep learning inference, up to dozens of times more than the MYRIAD Plugin. +The OpenVINO Runtime HDDL plugin was developed for inference with neural networks on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. It is designed for use cases that require large throughput for deep learning inference, up to dozens of times more than the MYRIAD Plugin. ## Configuring the HDDL Plugin To configure your Intel® Vision Accelerator Design With Intel® Movidius™ on supported operating systems, refer to the Steps for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs section in the installation guides for [Linux](../../install_guides/installing-openvino-linux.md) or [Windows](../../install_guides/installing-openvino-windows.md). +> **NOTE**: The HDDL and Myriad plugins may cause conflicts when used at the same time. +> To ensure proper operation in such a case, the number of booted devices needs to be limited in the 'hddl_autoboot.config' file. +> Otherwise, the HDDL plugin will boot all available Intel® Movidius™ Myriad™ X devices. + ## Supported networks To see the list of supported networks for the HDDL plugin, refer to the list on the [MYRIAD Plugin page](MYRIAD.md). 
diff --git a/docs/OV_Runtime_UG/supported_plugins/HETERO.md b/docs/OV_Runtime_UG/supported_plugins/HETERO.md deleted file mode 100644 index 85cf286df95..00000000000 --- a/docs/OV_Runtime_UG/supported_plugins/HETERO.md +++ /dev/null @@ -1,256 +0,0 @@ -# Heterogeneous Plugin {#openvino_docs_IE_DG_supported_plugins_HETERO} - -## Introducing the Heterogeneous Plugin (C++) - -@sphinxdirective -.. raw:: html - -
C++
-@endsphinxdirective - -The heterogeneous plugin enables computing the inference of one network on several devices. The purposes of executing networks in heterogeneous mode are to: - -* Utilize the power of accelerators to process the heaviest parts of the network and to execute unsupported layers on fallback devices like the CPU -* Utilize all available hardware more efficiently during one inference - -The execution through heterogeneous plugin can be divided into two independent steps: - -1. Setting of hardware affinity to layers -2. Loading a network to the Heterogeneous plugin, splitting the network to parts, and executing them through the plugin - -These steps are decoupled. The setting of affinity can be done automatically using the fallback policy or in manual mode. - -The fallback automatic policy causes "greedy" behavior and assigns all layers that can be executed on certain device according to the priorities you specify (for example, HETERO:GPU,CPU). -Automatic policy does not take into account plugin peculiarities such as the inability to infer some layers without other special layers placed before or after that layer. The plugin is responsible for solving such cases. If the device plugin does not support the subgraph topology constructed by the HETERO plugin, then you should set affinity manually. - -### Details of Splitting Network and Execution -During loading of the network to the Heterogeneous plugin, the network is divided into separate parts and loaded to dedicated plugins. -Intermediate blobs between these subgraphs are allocated automatically in the most efficient way. 
- -### Sample Usage - -Inference Engine sample programs can use the Heterogeneous plugin used with the `-d` option: - -```sh -./hello_classification /squeezenet1.1.xml /picture.jpg HETERO:GPU,CPU -``` -where: -- `HETERO` stands for the Heterogeneous plugin -- `GPU,CPU` points to fallback policy with priority on GPU and fallback to CPU - -You can point more than two devices: `-d HETERO:MYRIAD,GPU,CPU` - - -### Annotation of Layers per Device and Default Fallback Policy - -Default fallback policy decides which layer goes to which device automatically according to the support in dedicated plugins (GPU, CPU, MYRIAD). - -Another way to annotate a network is to set affinity manually using `ngraph::Node::get_rt_info` with key `affinity`: - -@snippet snippets/HETERO0.cpp part0 - -The fallback policy does not work if even one layer has an initialized affinity. The sequence should be to call automating affinity settings and then fix manually. - -> **NOTE**: If you set affinity manually, be careful because currently Inference Engine plugins don't support constant (`Constant`->`Result`) and empty (`Parameter`->`Result`) networks. Please avoid such subgraphs when you set affinity manually. - -@snippet snippets/HETERO1.cpp part1 - -If you rely on the default affinity distribution, you can avoid calling InferenceEngine::Core::QueryNetwork and just call InferenceEngine::Core::LoadNetwork instead: - -@snippet snippets/HETERO2.cpp part2 - -> **NOTE**: `InferenceEngine::Core::QueryNetwork` does not depend on affinities set by a user. Instead, it queries for layer support based on device capabilities. - -### Handling Difficult Topologies - -Some topologies are not friendly to heterogeneous execution on some devices or cannot be executed at all with this plugin -Examples are networks having activation layers that are not supported on the primary device. 
-If transmitting data from one part of a network to another part in heterogeneous mode takes more time than in normal mode, it may not make sense to execute them in heterogeneous mode. -In this case, you can define the heaviest part manually and set the affinity to avoid sending data back and forth many times during one inference. - -### Execution Precision -Precision for inference in the heterogeneous plugin is defined by: -* Precision of IR -* Ability of final plugins to execute in precision defined in IR - -For example, if you want to execute GPU with CPU fallback with FP16 on GPU, you need to use only FP16 IR. - -### Analyzing Performance Heterogeneous Execution -After enabling the KEY_HETERO_DUMP_GRAPH_DOT config key (shown in code snippet below), you can dump GraphViz* `.dot` files with annotations of devices per layer. - -The Heterogeneous plugin can generate two files: - -* `hetero_affinity_.dot` - annotation of affinities per layer. This file is written to the disk only if default fallback policy was executed -* `hetero_subgraphs_.dot` - annotation of affinities per graph. This file is written to the disk during execution of `ICNNNetwork::LoadNetwork()` for the Heterogeneous plugin - -@snippet snippets/HETERO3.cpp part3 - -You can use the GraphViz* utility or a file converter to view the images. On the Ubuntu* operating system, you can use xdot: - -* `sudo apt-get install xdot` -* `xdot hetero_subgraphs.dot` - -You can use performance data (in sample applications, it is the option `-pc`) to get the performance data on each subgraph. - -Here is an example of the output for Googlenet v1 running on HDDL with fallback to CPU: - -``` -subgraph1: 1. input preprocessing (mean data/HDDL):EXECUTED layerType: realTime: 129 cpu: 129 execType: -subgraph1: 2. input transfer to DDR:EXECUTED layerType: realTime: 201 cpu: 0 execType: -subgraph1: 3. HDDL execute time:EXECUTED layerType: realTime: 3808 cpu: 0 execType: -subgraph1: 4. 
output transfer from DDR:EXECUTED layerType: realTime: 55 cpu: 0 execType: -subgraph1: 5. HDDL output postprocessing:EXECUTED layerType: realTime: 7 cpu: 7 execType: -subgraph1: 6. copy to IE blob:EXECUTED layerType: realTime: 2 cpu: 2 execType: -subgraph2: out_prob: NOT_RUN layerType: Output realTime: 0 cpu: 0 execType: unknown -subgraph2: prob: EXECUTED layerType: SoftMax realTime: 10 cpu: 10 execType: ref -Total time: 4212 microseconds -``` -### See Also -[Supported Devices](Supported_Devices.md) - -## Introducing the Heterogeneous Plugin (Python) - -@sphinxdirective -.. raw:: html - -
Python
-@endsphinxdirective - -The heterogeneous plugin enables computing the inference of one network on several devices. The purposes of executing networks in heterogeneous mode are to: - -* Utilize the power of accelerators to process the heaviest parts of the network and to execute unsupported layers on fallback devices like the CPU -* Utilize all available hardware more efficiently during one inference - -The execution through heterogeneous plugin can be divided into two independent steps: - -1. Setting of hardware affinity to layers -2. Loading a network to the Heterogeneous plugin, splitting the network to parts, and executing them through the plugin - -These steps are decoupled. The setting of affinity can be done automatically using the fallback policy or in manual mode. - -The fallback automatic policy causes "greedy" behavior and assigns all layers that can be executed on certain device according to the priorities you specify (for example, HETERO:GPU,CPU). -Automatic policy does not take into account plugin peculiarities such as the inability to infer some layers without other special layers placed before or after that layer. The plugin is responsible for solving such cases. If the device plugin does not support the subgraph topology constructed by the HETERO plugin, then you should set affinity manually. - -Some of the topologies are not well-supported for heterogeneous execution on some devices or cannot be executed in this mode at all. Examples of such networks are those having activation layers which are not supported on the primary device. If transmitting data from one part of a network to another part in heterogeneous mode takes more time than in normal mode, it may not make sense to execute them in heterogeneous mode. In this case, you can define the most compute intense part manually and set the affinity to avoid sending data back and forth many times during one inference. 
- -### Use Default Layer Affinities - -To use the default affinities, call `load_network` with the "HETERO" device, with an optional list of devices to consider. - -```python -from openvino.inference_engine import IECore - -ie = IECore() -net = ie.read_network(model=path_to_model) -exec_net = ie.load_network(network=net, device_name='HETERO:GPU,CPU') -``` - - -### Annotation of Layers per Device and Default Fallback Policy - -Default fallback policy decides which layer goes to which device automatically according to the support in dedicated plugins (GPU, CPU, MYRIAD). - -Another way to annotate a network is to set affinity manually using code. - -### Set Affinity of All Layers to CPU -```python -import ngraph as ng -from openvino.inference_engine import IECore - -ie = IECore() -# Read a network in IR or ONNX format -net = ie.read_network(path_to_model) -# Create an Ngraph (graph) function from the network -ng_func = ng.function_from_cnn(net) -for node in ng_func.get_ordered_ops(): - rt_info = node.get_rt_info() - rt_info["affinity"] = "CPU" -``` - - -The fallback policy does not work if even one layer has an initialized affinity. The sequence should be calling the default affinity settings and then setting the layers manually. - -> **NOTE**: If you set affinity manually, be aware that currently Inference Engine plugins do not support constant (*Constant -> Result*) and empty (*Parameter -> Result*) networks. Please avoid these subgraphs when you set affinity manually. 
- -### Example - Manually Setting Layer Affinities - -```python -import ngraph as ng -from openvino.inference_engine import IECore - -ie = IECore() -# Read a network in IR or ONNX format -net = ie.read_network(path_to_model) -ng_func = ng.function_from_cnn(net) - -for node in ng_func.get_ordered_ops(): - rt_info = node.get_rt_info() - rt_info["affinity"] = "CPU" - -# Load the network on the target device -exec_net = ie.load_network(network=net, device_name='HETERO:FPGA,CPU') -``` - -> **NOTE**: `ie.query_network` does not depend on affinities set by a user, but queries for layer support based on device capabilities. - -### Details of Splitting Network and Execution - -During the loading of the network to the heterogeneous plugin, the network is divided into separate parts and loaded to dedicated plugins. Intermediate blobs between these sub graphs are allocated automatically in the most efficient way. - -### Execution Precision - -The precision for inference in the heterogeneous plugin is defined by: -* Precision of IR -* Ability of final plugins to execute in precision defined in IR - -For example, if you want to execute GPU with CPU fallback with FP16 on GPU, you need to use only FP16 IR. - -OpenVINO samples can be used with the following command: -```sh -./hello_classification /squeezenet1.1.xml /picture.jpg HETERO:GPU,CPU -``` - -where `HETERO` stands for the heterogeneous plugin. - -You can point to more than two devices, for example: `-d HETERO:MYRIAD,GPU,CPU` - -### Analyzing Heterogeneous Execution - -After enabling the KEY_HETERO_DUMP_GRAPH_DOT config key, you can dump GraphViz* .dot files with annotations of devices per layer. - -The heterogeneous plugin can generate two files: - -* `hetero_affinity_.dot` - annotation of affinities per layer. This file is written to the disk only if the default fallback policy was executed -* `hetero_subgraphs_.dot` - annotation of affinities per graph. 
This file is written to the disk during execution of `ICNNNetwork::LoadNetwork()` for the heterogeneous plugin - -#### To Generate the .dot Files - -```python -ie = IECore() -ie.set_config( config={'HETERO_DUMP_GRAPH_DOT' : 'YES'}, device_name='HETERO') -``` - -You can use the GraphViz* utility or a file converter to view the images. On the Ubuntu* operating system, you can use xdot: - -* `sudo apt-get install xdot` -* `xdot hetero_subgraphs.dot` - -You can use performance data (in sample applications, it is the option `-pc`) to get the performance data on each subgraph. - -Here is an example of the output for Googlenet v1 running on HDDL with fallback to CPU: - -``` -subgraph1: 1. input preprocessing (mean data/HDDL):EXECUTED layerType: realTime: 129 cpu: 129 execType: -subgraph1: 2. input transfer to DDR:EXECUTED layerType: realTime: 201 cpu: 0 execType: -subgraph1: 3. HDDL execute time:EXECUTED layerType: realTime: 3808 cpu: 0 execType: -subgraph1: 4. output transfer from DDR:EXECUTED layerType: realTime: 55 cpu: 0 execType: -subgraph1: 5. HDDL output postprocessing:EXECUTED layerType: realTime: 7 cpu: 7 execType: -subgraph1: 6. 
copy to IE blob:EXECUTED layerType: realTime: 2 cpu: 2 execType: -subgraph2: out_prob: NOT_RUN layerType: Output realTime: 0 cpu: 0 execType: unknown -subgraph2: prob: EXECUTED layerType: SoftMax realTime: 10 cpu: 10 execType: ref -Total time: 4212 microseconds -``` - - -### See Also -[Supported Devices](Supported_Devices.md) diff --git a/docs/OV_Runtime_UG/supported_plugins/MYRIAD.md b/docs/OV_Runtime_UG/supported_plugins/MYRIAD.md index 53addae9c51..d6ecdf5ce51 100644 --- a/docs/OV_Runtime_UG/supported_plugins/MYRIAD.md +++ b/docs/OV_Runtime_UG/supported_plugins/MYRIAD.md @@ -1,13 +1,17 @@ -# MYRIAD Plugin {#openvino_docs_IE_DG_supported_plugins_MYRIAD} +# MYRIAD device {#openvino_docs_OV_UG_supported_plugins_MYRIAD} ## Introducing MYRIAD Plugin -The Inference Engine MYRIAD plugin has been developed for inference of neural networks on Intel® Neural Compute Stick 2. +The OpenVINO Runtime MYRIAD plugin has been developed for inference of neural networks on Intel® Neural Compute Stick 2. ## Configuring the MYRIAD Plugin To configure your Intel® Vision Accelerator Design With Intel® Movidius™ on supported operating systemss, refer to the Steps for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs section in the installation guides for [Linux](../../install_guides/installing-openvino-linux.md) or [Windows](../../install_guides/installing-openvino-windows.md). + > **NOTE**: The HDDL and MYRIAD plugins may cause conflicts when used at the same time. +> To ensure proper operation in such a case, the number of booted devices needs to be limited in the 'hddl_autoboot.config' file. +> Otherwise, the HDDL plugin will boot all available Intel® Movidius™ Myriad™ X devices. + ## Supported Configuration Parameters See VPU common configuration parameters for the [VPU Plugins](VPU.md). 
diff --git a/docs/OV_Runtime_UG/supported_plugins/Supported_Devices.md b/docs/OV_Runtime_UG/supported_plugins/Supported_Devices.md index ad13af79d3f..0a192d31e89 100644 --- a/docs/OV_Runtime_UG/supported_plugins/Supported_Devices.md +++ b/docs/OV_Runtime_UG/supported_plugins/Supported_Devices.md @@ -1,11 +1,11 @@ Supported Devices {#openvino_docs_IE_DG_supported_plugins_Supported_Devices} ================== -The Inference Engine can infer models in different formats with various input and output formats. This section provides supported and optimal configurations per device. In OpenVINO™ documentation, "device" refers to an Intel® processors used for inference, which can be a supported CPU, GPU, VPU (vision processing unit), or GNA (Gaussian neural accelerator coprocessor), or a combination of those devices. +The OpenVINO Runtime can infer models in different formats with various input and output formats. This section provides supported and optimal configurations per device. In OpenVINO™ documentation, "device" refers to an Intel® processors used for inference, which can be a supported CPU, GPU, VPU (vision processing unit), or GNA (Gaussian neural accelerator coprocessor), or a combination of those devices. > **NOTE**: With OpenVINO™ 2020.4 release, Intel® Movidius™ Neural Compute Stick is no longer supported. 
-The Inference Engine provides unique capabilities to infer deep learning models on the following device types with corresponding plugins: +The OpenVINO Runtime provides unique capabilities to infer deep learning models on the following device types with corresponding plugins: | Plugin | Device types | |------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------| @@ -13,9 +13,10 @@ The Inference Engine provides unique capabilities to infer deep learning models |[CPU plugin](CPU.md) |Intel® Xeon® with Intel® Advanced Vector Extensions 2 (Intel® AVX2), Intel® Advanced Vector Extensions 512 (Intel® AVX-512), and AVX512_BF16, Intel® Core™ Processors with Intel® AVX2, Intel® Atom® Processors with Intel® Streaming SIMD Extensions (Intel® SSE) | |[VPU plugins](VPU.md) (available in the Intel® Distribution of OpenVINO™ toolkit) |Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X, Intel® Vision Accelerator Design with Intel® Movidius™ VPUs | |[GNA plugin](GNA.md) (available in the Intel® Distribution of OpenVINO™ toolkit) |Intel® Speech Enabling Developer Kit, Amazon Alexa* Premium Far-Field Developer Kit, Intel® Pentium® Silver J5005 Processor, Intel® Pentium® Silver N5000 Processor, Intel® Celeron® J4005 Processor, Intel® Celeron® J4105 Processor, Intel® Celeron® Processor N4100, Intel® Celeron® Processor N4000, Intel® Core™ i3-8121U Processor, Intel® Core™ i7-1065G7 Processor, Intel® Core™ i7-1060G7 Processor, Intel® Core™ i5-1035G4 Processor, Intel® Core™ i5-1035G7 Processor, Intel® Core™ i5-1035G1 Processor, Intel® Core™ i5-1030G7 Processor, Intel® Core™ i5-1030G4 Processor, Intel® Core™ i3-1005G1 Processor, Intel® Core™ i3-1000G1 Processor, Intel® Core™ i3-1000G4 Processor| -|[Multi-Device plugin](MULTI.md) |Multi-Device plugin enables simultaneous inference of the same network on several Intel® devices in 
parallel | -|[Auto-Device plugin](AUTO.md) |Auto-Device plugin enables selecting Intel® device for inference automatically | -|[Heterogeneous plugin](HETERO.md) |Heterogeneous plugin enables automatic inference splitting between several Intel® devices (for example if a device doesn't [support certain layers](#supported-layers)). | +|[Arm® CPU plugin](ARM_CPU.md) (unavailable in the Intel® Distribution of OpenVINO™ toolkit) |Raspberry Pi™ 4 Model B, Apple® Mac mini with M1 chip, NVIDIA® Jetson Nano™, Android™ devices | +|[Multi-Device execution](../multi_device.md) |Multi-Device execution enables simultaneous inference of the same model on several devices in parallel | +|[Auto-Device plugin](../auto_device_selection.md) |Auto-Device plugin enables selecting Intel® device for inference automatically | +|[Heterogeneous execution](../hetero_execution.md) |Heterogeneous execution enables automatic inference splitting between several devices (for example if a device doesn't [support certain operations](#supported-layers)). | Devices similar to the ones we have used for benchmarking can be accessed using [Intel® DevCloud for the Edge](https://devcloud.intel.com/edge/), a remote development environment with access to Intel® hardware and the latest versions of the Intel® Distribution of the OpenVINO™ Toolkit. [Learn more](https://devcloud.intel.com/edge/get_started/devcloud/) or [Register here](https://inteliot.force.com/DevcloudForEdge/s/). 
@@ -28,6 +29,7 @@ The table below shows the plugin libraries and additional dependencies for Linux | MYRIAD | `libopenvino_intel_myriad_plugin.so` | `libusb.so` | `openvino_intel_myriad_plugin.dll`| `usb.dll` | `libopenvino_intel_myriad_plugin.so` | `libusb.dylib` | | HDDL | `libintel_hddl_plugin.so` | `libbsl.so`, `libhddlapi.so`, `libmvnc-hddl.so` | `intel_hddl_plugin.dll` | `bsl.dll`, `hddlapi.dll`, `json-c.dll`, `libcrypto-1_1-x64.dll`, `libssl-1_1-x64.dll`, `mvnc-hddl.dll` | Is not supported | - | | GNA | `libopenvino_intel_gna_plugin.so` | `libgna.so`, | `openvino_intel_gna_plugin.dll` | `gna.dll` | Is not supported | - | +| Arm® CPU | `libopenvino_arm_cpu_plugin.so` | - | Is not supported | - | `libopenvino_arm_cpu_plugin.so` | - | | HETERO | `libopenvino_hetero_plugin.so` | Same as for selected plugins | `openvino_hetero_plugin.dll` | Same as for selected plugins | `libopenvino_hetero_plugin.so` | Same as for selected plugins | | MULTI | `libopenvino_auto_plugin.so` | Same as for selected plugins | `openvino_auto_plugin.dll` | Same as for selected plugins | `libopenvino_auto_plugin.so` | Same as for selected plugins | | AUTO | `libopenvino_auto_plugin.so` | Same as for selected plugins | `openvino_auto_plugin.dll` | Same as for selected plugins | `libopenvino_auto_plugin.so` | Same as for selected plugins | @@ -35,7 +37,7 @@ The table below shows the plugin libraries and additional dependencies for Linux ## Supported Configurations -The Inference Engine can inference models in different formats with various input and output formats. +The OpenVINO Runtime can infer models in different formats with various input and output formats. This page shows supported and optimal configurations for each plugin. 
### Terminology @@ -66,48 +68,53 @@ For example, the CHW value at index (c,h,w) is physically located at index (c\*H ### Supported Model Formats -|Plugin |FP32 |FP16 |I8 | -|:-------------|:----------------------:|:----------------------:|:----------------------:| -|CPU plugin |Supported and preferred |Supported |Supported | -|GPU plugin |Supported |Supported and preferred |Supported\* | -|VPU plugins |Not supported |Supported |Not supported | -|GNA plugin |Supported |Supported |Not supported | -
\* - currently, only limited set of topologies might benefit from enabling I8 model on GPU
-For [Multi-Device](MULTI.md) and [Heterogeneous](HETERO.md) execution +|Plugin |FP32 |FP16 |I8 | +|:------------------|:----------------------:|:----------------------:|:----------------------:| +|CPU plugin |Supported and preferred |Supported |Supported | +|GPU plugin |Supported |Supported and preferred |Supported | +|VPU plugins |Not supported |Supported |Not supported | +|GNA plugin |Supported |Supported |Not supported | +|Arm® CPU plugin |Supported and preferred |Supported |Supported (partially) | + +For [Multi-Device](../multi_device.md) and [Heterogeneous](../hetero_execution.md) executions the supported models formats depends on the actual underlying devices. _Generally, FP16 is preferable as it is most ubiquitous and performant_. ### Supported Input Precision -|Plugin |FP32 |FP16 |U8 |U16 |I8 |I16 | -|:-------------|:--------:|:-------------:|:-------------:|:-------------:|:------------:|:-------------:| -|CPU plugin |Supported |Not supported |Supported |Supported |Not supported |Supported | -|GPU plugin |Supported |Supported\* |Supported\* |Supported\* |Not supported |Supported\* | -|VPU plugins |Supported |Supported |Supported |Not supported |Not supported |Not supported | -|GNA plugin |Supported |Not supported |Supported |Not supported |Supported |Supported | +|Plugin |FP32 |FP16 |U8 |U16 |I8 |I16 | +|:------------------|:--------:|:-------------:|:-------------:|:-------------:|:------------:|:-------------:| +|CPU plugin |Supported |Not supported |Supported |Supported |Not supported |Supported | +|GPU plugin |Supported |Supported\* |Supported\* |Supported\* |Not supported |Supported\* | +|VPU plugins |Supported |Supported |Supported |Not supported |Not supported |Not supported | +|GNA plugin |Supported |Not supported |Supported |Not supported |Supported |Supported | +|Arm® CPU plugin |Supported |Supported |Supported |Supported |Not supported |Not supported |
\* - Supported via `SetBlob` only, `GetBlob` returns FP32
-For [Multi-Device](MULTI.md) and [Heterogeneous](HETERO.md) execution +For [Multi-Device](../multi_device.md) and [Heterogeneous](../hetero_execution.md) executions the supported input precision depends on the actual underlying devices. _Generally, U8 is preferable as it is most ubiquitous_. ### Supported Output Precision -|Plugin |FP32 |FP16 | -|:-------------|:--------:|:------------:| -|CPU plugin |Supported |Not supported | -|GPU plugin |Supported |Supported | -|VPU plugins |Supported |Supported | -|GNA plugin |Supported |Not supported | -For [Multi-Device](MULTI.md) and [Heterogeneous](HETERO.md) execution +|Plugin |FP32 |FP16 | +|:------------------|:--------:|:------------:| +|CPU plugin |Supported |Not supported | +|GPU plugin |Supported |Supported | +|VPU plugins |Supported |Supported | +|GNA plugin |Supported |Not supported | +|Arm® CPU plugin |Supported |Supported | + +For [Multi-Device](../multi_device.md) and [Heterogeneous](../hetero_execution.md) executions the supported output precision depends on the actual underlying devices. _Generally, FP32 is preferable as it is most ubiquitous_. 
### Supported Input Layout -|Plugin |NCDHW |NCHW |NHWC |NC | -|:-------------|:------------:|:------------:|:------------:|:------------:| -|CPU plugin |Supported |Supported |Supported |Supported | -|GPU plugin |Supported |Supported |Supported |Supported | -|VPU plugins |Supported |Supported |Supported |Supported | -|GNA plugin |Not supported |Supported |Supported |Supported | +|Plugin |NCDHW |NCHW |NHWC |NC | +|:------------------|:------------:|:------------:|:------------:|:------------:| +|CPU plugin |Supported |Supported |Supported |Supported | +|GPU plugin |Supported |Supported |Supported |Supported | +|VPU plugins |Supported |Supported |Supported |Supported | +|GNA plugin |Not supported |Supported |Supported |Supported | +|Arm® CPU plugin |Not supported |Supported |Supported |Supported | ### Supported Output Layout @@ -116,161 +123,163 @@ the supported output precision depends on the actual underlying devices. _Gener |Layout |NCDHW|NCHW |CHW |NC |C | For setting relevant configuration, refer to the -[Integrate with Customer Application New Request API](../Integrate_with_customer_application_new_API.md) topic +[Integrate with Customer Application](../integrate_with_your_application.md) topic (step 3 "Configure input and output"). 
### Supported Layers The following layers are supported by the plugins and by [Shape Inference feature](../ShapeInference.md): -| Layers | GPU | CPU | VPU | GNA | ShapeInfer | -|:-------------------------------|:-------------:|:-------------:|:-------------:|:-------------:|:-------------:| -| Abs | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Acos | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Acosh | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Activation-Clamp | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Activation-ELU | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Activation-Exp | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Activation-Leaky ReLU | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Activation-Not | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Activation-PReLU | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Activation-ReLU | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Activation-ReLU6 | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Activation-Sigmoid/Logistic | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Activation-TanH | Supported |Supported\*\*\*| Supported | Supported | Supported | -| ArgMax | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Asin | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Asinh | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Atan | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Atanh | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| BatchNormalization | Supported | Supported | Supported | Not Supported | Supported | -| BinaryConvolution | Supported | Supported | Not Supported | Not Supported | 
Supported | -| Broadcast | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Ceil | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Concat | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Const | Supported | Supported | Supported | Supported | Not Supported | -| Convolution-Dilated | Supported | Supported | Supported | Not Supported | Supported | -| Convolution-Dilated 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | -| Convolution-Grouped | Supported | Supported | Supported | Not Supported | Supported | -| Convolution-Grouped 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | -| Convolution-Ordinary | Supported | Supported | Supported | Supported\* | Supported | -| Convolution-Ordinary 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | -| Cos | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Cosh | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Crop | Supported | Supported | Supported | Supported | Supported | -| CTCGreedyDecoder | Supported\*\* | Supported\*\* | Supported\* | Not Supported | Supported | -| Deconvolution | Supported | Supported | Supported | Not Supported | Supported | -| Deconvolution 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | -| DeformableConvolution | Supported | Supported | Not Supported | Not Supported | Supported | -| DepthToSpace | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| DetectionOutput | Supported | Supported\*\* | Supported\* | Not Supported | Supported | -| Eltwise-And | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Add | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Div | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Equal | Supported |Supported\*\*\*| Supported | Not 
Supported | Supported | -| Eltwise-FloorMod | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Greater | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-GreaterEqual | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Less | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-LessEqual | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-LogicalAnd | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-LogicalOr | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-LogicalXor | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Max | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Min | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Mul | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Eltwise-NotEqual | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Pow | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Prod | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Eltwise-SquaredDiff | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Eltwise-Sub | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Eltwise-Sum | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Erf | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Exp | Supported | Supported | Supported | Supported | Supported | -| FakeQuantize | Not Supported | Supported | Not Supported | Not Supported | Supported | -| Fill | Not Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Flatten | Supported | Supported | Supported | Not Supported | Supported | -| Floor | Supported | Supported\*\* | Supported | Not Supported | Supported | -| 
FullyConnected (Inner Product) | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Gather | Supported | Supported\*\* | Supported | Not Supported | Supported | -| GatherTree | Not Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Gemm | Supported | Supported | Supported | Not Supported | Supported | -| GRN | Supported\*\* | Supported\*\* | Supported | Not Supported | Supported | -| HardSigmoid | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Interp | Supported\*\* | Supported\*\* | Supported | Not Supported | Supported\* | -| Log | Supported | Supported\*\* | Supported | Supported | Supported | -| LRN (Norm) | Supported | Supported | Supported | Not Supported | Supported | -| LSTMCell | Supported | Supported | Supported | Supported | Not Supported | -| GRUCell | Supported | Supported | Not Supported | Not Supported | Not Supported | -| RNNCell | Supported | Supported | Not Supported | Not Supported | Not Supported | -| LSTMSequence | Supported | Supported | Supported | Not Supported | Not Supported | -| GRUSequence | Supported | Supported | Not Supported | Not Supported | Not Supported | -| RNNSequence | Supported | Supported | Not Supported | Not Supported | Not Supported | -| LogSoftmax | Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | -| Memory | Not Supported | Supported | Not Supported | Supported | Supported | -| MVN | Supported | Supported\*\* | Supported\* | Not Supported | Supported | -| Neg | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| NonMaxSuppression | Not Supported | Supported\*\* | Supported | Not Supported | Supported | -| Normalize | Supported | Supported\*\* | Supported\* | Not Supported | Supported | -| OneHot | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Pad | Supported | Supported\*\* | Supported\* | Not Supported | Supported | -| Permute | Supported | Supported | Supported | Supported\* | 
Supported | -| Pooling(AVG,MAX) | Supported | Supported | Supported | Supported | Supported | -| Pooling(AVG,MAX) 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | -| Power | Supported | Supported\*\* | Supported | Supported\* | Supported | -| PowerFile | Not Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | -| PriorBox | Supported | Supported\*\* | Supported | Not Supported | Supported | -| PriorBoxClustered | Supported\*\* | Supported\*\* | Supported | Not Supported | Supported | -| Proposal | Supported | Supported\*\* | Supported | Not Supported | Supported | -| PSROIPooling | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Range | Not Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Reciprocal | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| ReduceAnd | Supported | Supported\*\* | Supported | Not Supported | Supported | -| ReduceL1 | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| ReduceL2 | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| ReduceLogSum | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| ReduceLogSumExp | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| ReduceMax | Supported | Supported\*\* | Supported | Not Supported | Supported | -| ReduceMean | Supported | Supported\*\* | Supported | Not Supported | Supported | -| ReduceMin | Supported | Supported\*\* | Supported | Not Supported | Supported | -| ReduceOr | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| ReduceProd | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| ReduceSum | Supported | Supported\*\* | Supported | Not Supported | Supported | -| ReduceSumSquare | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| RegionYolo | Supported | Supported\*\* | Supported | Not Supported | 
Supported | -| ReorgYolo | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Resample | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Reshape | Supported |Supported\*\*\*| Supported | Supported | Supported\* | -| ReverseSequence | Supported | Supported\*\* | Supported | Not Supported | Supported | -| RNN | Not Supported | Supported | Supported | Not Supported | Not Supported | -| ROIPooling | Supported\* | Supported | Supported | Not Supported | Supported | -| ScaleShift | Supported |Supported\*\*\*| Supported\* | Supported | Supported | -| ScatterUpdate | Not Supported | Supported\*\* | Supported | Not Supported | Supported | -| Select | Supported | Supported | Supported | Not Supported | Supported | -| Selu | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| ShuffleChannels | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Sign | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Sin | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Sinh | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| SimplerNMS | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Slice | Supported |Supported\*\*\*| Supported | Supported | Supported | -| SoftMax | Supported |Supported\*\*\*| Supported | Not Supported | Supported | -| Softplus | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Softsign | Supported | Supported\*\* | Not Supported | Supported | Supported | -| SpaceToDepth | Not Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| SpatialTransformer | Not Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| Split | Supported |Supported\*\*\*| Supported | Supported | Supported | -| Squeeze | Supported | Supported\*\* | Supported | Supported | Supported | -| StridedSlice | Supported | Supported\*\* | Supported | Not Supported 
| Supported | -| Tan | Supported | Supported\*\* | Not Supported | Not Supported | Supported | -| TensorIterator | Not Supported | Supported | Supported | Supported | Not Supported | -| Tile | Supported\*\* |Supported\*\*\*| Supported | Not Supported | Supported | -| TopK | Supported | Supported\*\* | Supported | Not Supported | Supported | -| Unpooling | Supported | Not Supported | Not Supported | Not Supported | Not Supported | -| Unsqueeze | Supported | Supported\*\* | Supported | Supported | Supported | -| Upsampling | Supported | Not Supported | Not Supported | Not Supported | Not Supported | +| Layers | GPU | CPU | VPU | GNA | Arm® CPU | ShapeInfer | +|:-------------------------------|:-------------:|:-------------:|:-------------:|:-------------:|:---------------:|:-------------:| +| Abs | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| Acos | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*|Supported | +| Acosh | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*|Supported | +| Activation-Clamp | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| Activation-ELU | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Activation-Exp | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| Activation-Leaky ReLU | Supported |Supported\*\*\*| Supported | Supported | Not Supported | Supported | +| Activation-Not | Supported |Supported\*\*\*| Supported | Not Supported | Not Supported | Supported | +| Activation-PReLU | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Activation-ReLU | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| Activation-ReLU6 | Supported |Supported\*\*\*| Supported | Not Supported | Not Supported | Supported | +| Activation-Sigmoid/Logistic | Supported |Supported\*\*\*| Supported | Supported | Supported | 
Supported | +| Activation-TanH | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| ArgMax | Supported | Supported\*\* | Supported | Not Supported | Not Supported | Supported | +| Asin | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| Asinh | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| Atan | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| Atanh | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| BatchNormalization | Supported | Supported | Supported | Not Supported | Supported | Supported | +| BinaryConvolution | Supported | Supported | Not Supported | Not Supported | Not Supported | Supported | +| Broadcast | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| Ceil | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| Concat | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| Const | Supported | Supported | Supported | Supported | Supported | Not Supported | +| Convolution-Dilated | Supported | Supported | Supported | Not Supported | Supported | Supported | +| Convolution-Dilated 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | Not Supported | +| Convolution-Grouped | Supported | Supported | Supported | Not Supported | Supported | Supported | +| Convolution-Grouped 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | Not Supported | +| Convolution-Ordinary | Supported | Supported | Supported | Supported\* | Supported | Supported | +| Convolution-Ordinary 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | Not Supported | +| Cos | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| Cosh | Supported | Supported\*\* | Not Supported | Not Supported 
|Supported\*\*\*\*| Supported | +| Crop | Supported | Supported | Supported | Supported | Not Supported | Supported | +| CTCGreedyDecoder | Supported\*\* | Supported\*\* | Supported\* | Not Supported |Supported\*\*\*\*| Supported | +| Deconvolution | Supported | Supported | Supported | Not Supported | Not Supported | Supported | +| Deconvolution 3D | Supported | Supported | Not Supported | Not Supported | Not Supported | Not Supported | +| DeformableConvolution | Supported | Supported | Not Supported | Not Supported | Not Supported | Supported | +| DepthToSpace | Supported | Supported\*\* | Not Supported | Not Supported | Supported\* | Supported | +| DetectionOutput | Supported | Supported\*\* | Supported\* | Not Supported |Supported\*\*\*\*| Supported | +| Eltwise-And | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-Add | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-Div | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-Equal | Supported |Supported\*\*\*| Supported | Not Supported | Supported\* | Supported | +| Eltwise-FloorMod | Supported |Supported\*\*\*| Supported | Not Supported |Supported\*\*\*\*| Supported | +| Eltwise-Greater | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-GreaterEqual | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-Less | Supported |Supported\*\*\*| Supported | Not Supported | Supported\* | Supported | +| Eltwise-LessEqual | Supported |Supported\*\*\*| Supported | Not Supported | Supported\* | Supported | +| Eltwise-LogicalAnd | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-LogicalOr | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-LogicalXor | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| 
Eltwise-Max | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-Min | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-Mul | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| Eltwise-NotEqual | Supported |Supported\*\*\*| Supported | Not Supported | Supported\* | Supported | +| Eltwise-Pow | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-Prod | Supported |Supported\*\*\*| Supported | Supported | Not Supported | Supported | +| Eltwise-SquaredDiff | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Eltwise-Sub | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| Eltwise-Sum | Supported |Supported\*\*\*| Supported | Supported |Supported\*\*\*\*| Supported | +| Erf | Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| Exp | Supported | Supported | Supported | Supported | Supported | Supported | +| FakeQuantize | Not Supported | Supported | Not Supported | Not Supported | Supported\* | Supported | +| Fill | Not Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | Supported | +| Flatten | Supported | Supported | Supported | Not Supported | Not Supported | Supported | +| Floor | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| FullyConnected (Inner Product) | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| Gather | Supported | Supported\*\* | Supported | Not Supported | Supported\* | Supported | +| GatherTree | Not Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| Gemm | Supported | Supported | Supported | Not Supported | Not Supported | Supported | +| GRN | Supported\*\* | Supported\*\* | Supported | Not Supported | Supported | Supported | +| HardSigmoid | Supported | Supported\*\* | Not 
Supported | Not Supported |Supported\*\*\*\*| Supported | +| Interp | Supported\*\* | Supported\*\* | Supported | Not Supported | Supported\* | Supported\* | +| Log | Supported | Supported\*\* | Supported | Supported | Supported | Supported | +| LRN (Norm) | Supported | Supported | Supported | Not Supported | Supported\* | Supported | +| LSTMCell | Supported | Supported | Supported | Supported | Supported | Not Supported | +| GRUCell | Supported | Supported | Not Supported | Not Supported | Supported | Not Supported | +| RNNCell | Supported | Supported | Not Supported | Not Supported | Supported | Not Supported | +| LSTMSequence | Supported | Supported | Supported | Not Supported |Supported\*\*\*\*| Not Supported | +| GRUSequence | Supported | Supported | Not Supported | Not Supported |Supported\*\*\*\*| Not Supported | +| RNNSequence | Supported | Supported | Not Supported | Not Supported |Supported\*\*\*\*| Not Supported | +| LogSoftmax | Supported | Supported\*\* | Not Supported | Not Supported | Supported | Not Supported | +| Memory | Not Supported | Supported | Not Supported | Supported | Not Supported | Supported | +| MVN | Supported | Supported\*\* | Supported\* | Not Supported | Supported\* | Supported | +| Neg | Supported | Supported\*\* | Not Supported | Not Supported | Supported | Supported | +| NonMaxSuppression | Not Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| Normalize | Supported | Supported\*\* | Supported\* | Not Supported | Supported\* | Supported | +| OneHot | Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| Pad | Supported | Supported\*\* | Supported\* | Not Supported | Supported\* | Supported | +| Permute | Supported | Supported | Supported | Supported\* | Not Supported | Supported | +| Pooling(AVG,MAX) | Supported | Supported | Supported | Supported | Supported | Supported | +| Pooling(AVG,MAX) 3D | Supported | Supported | Not Supported | Not Supported | 
Supported\* | Not Supported | +| Power | Supported | Supported\*\* | Supported | Supported\* | Supported | Supported | +| PowerFile | Not Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | Not Supported | +| PriorBox | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| PriorBoxClustered | Supported\*\* | Supported\*\* | Supported | Not Supported | Supported | Supported | +| Proposal | Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| PSROIPooling | Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| Range | Not Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | Supported | +| Reciprocal | Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | Supported | +| ReduceAnd | Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| ReduceL1 | Supported | Supported\*\* | Not Supported | Not Supported | Supported | Supported | +| ReduceL2 | Supported | Supported\*\* | Not Supported | Not Supported | Supported | Supported | +| ReduceLogSum | Supported | Supported\*\* | Not Supported | Not Supported | Supported | Supported | +| ReduceLogSumExp | Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | Supported | +| ReduceMax | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| ReduceMean | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| ReduceMin | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| ReduceOr | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| ReduceProd | Supported | Supported\*\* | Not Supported | Not Supported | Supported | Supported | +| ReduceSum | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| ReduceSumSquare | Supported | Supported\*\* | Not Supported | 
Not Supported | Not Supported | Supported | +| RegionYolo | Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| ReorgYolo | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| Resample | Supported | Supported\*\* | Supported | Not Supported | Not Supported | Supported | +| Reshape | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported\* | +| ReverseSequence | Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| RNN | Not Supported | Supported | Supported | Not Supported | Supported | Not Supported | +| ROIPooling | Supported\* | Supported | Supported | Not Supported |Supported\*\*\*\*| Supported | +| ScaleShift | Supported |Supported\*\*\*| Supported\* | Supported | Not Supported | Supported | +| ScatterUpdate | Not Supported | Supported\*\* | Supported | Not Supported | Not Supported | Supported | +| Select | Supported | Supported | Supported | Not Supported | Supported | Supported | +| Selu | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| ShuffleChannels | Supported | Supported\*\* | Not Supported | Not Supported | Supported | Supported | +| Sign | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| Sin | Supported | Supported\*\* | Not Supported | Not Supported | Supported | Supported | +| Sinh | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| SimplerNMS | Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | Supported | +| Slice | Supported |Supported\*\*\*| Supported | Supported | Not Supported | Supported | +| SoftMax | Supported |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| Softplus | Supported | Supported\*\* | Supported | Not Supported | Supported | Supported | +| Softsign | Supported | Supported\*\* | Not Supported | Supported | Not Supported | Supported | +| 
SpaceToDepth | Not Supported | Supported\*\* | Not Supported | Not Supported | Supported\* | Supported | +| SpatialTransformer | Not Supported | Supported\*\* | Not Supported | Not Supported | Not Supported | Supported | +| Split | Supported |Supported\*\*\*| Supported | Supported | Supported | Supported | +| Squeeze | Supported | Supported\*\* | Supported | Supported | Supported | Supported | +| StridedSlice | Supported | Supported\*\* | Supported | Not Supported | Supported\* | Supported | +| Tan | Supported | Supported\*\* | Not Supported | Not Supported |Supported\*\*\*\*| Supported | +| TensorIterator | Not Supported | Supported | Supported | Supported | Supported | Not Supported | +| Tile | Supported\*\* |Supported\*\*\*| Supported | Not Supported | Supported | Supported | +| TopK | Supported | Supported\*\* | Supported | Not Supported |Supported\*\*\*\*| Supported | +| Unpooling | Supported | Not Supported | Not Supported | Not Supported | Not Supported | Not Supported | +| Unsqueeze | Supported | Supported\*\* | Supported | Supported | Supported | Supported | +| Upsampling | Supported | Not Supported | Not Supported | Not Supported | Not Supported | Not Supported | \*- support is limited to the specific parameters. Refer to "Known Layers Limitation" section for the device [from the list of supported](Supported_Devices.md). -\*\*- support is implemented via [Extensibility mechanism](../Extensibility_DG/Intro.md). +\*\*- support is implemented via [Extensibility mechanism](../../Extensibility_UG/Intro.md). \*\*\*- supports NCDHW layout. + +\*\*\*\*- support is implemented via runtime reference. 
diff --git a/docs/OV_Runtime_UG/supported_plugins/VPU.md b/docs/OV_Runtime_UG/supported_plugins/VPU.md index 62a390fdbb5..e1b2efcdf91 100644 --- a/docs/OV_Runtime_UG/supported_plugins/VPU.md +++ b/docs/OV_Runtime_UG/supported_plugins/VPU.md @@ -1,4 +1,4 @@ -# VPU Plugins {#openvino_docs_IE_DG_supported_plugins_VPU} +# VPU devices {#openvino_docs_IE_DG_supported_plugins_VPU} @sphinxdirective @@ -6,12 +6,12 @@ :maxdepth: 1 :hidden: - openvino_docs_IE_DG_supported_plugins_MYRIAD - openvino_docs_IE_DG_supported_plugins_HDDL + openvino_docs_OV_UG_supported_plugins_MYRIAD + openvino_docs_OV_UG_supported_plugins_HDDL @endsphinxdirective -This chapter provides information on the Inference Engine plugins that enable inference of deep learning models on the supported VPU devices: +This chapter provides information on the OpenVINO Runtime plugins that enable inference of deep learning models on the supported VPU devices: * Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X — Supported by the [MYRIAD Plugin](MYRIAD.md) * Intel® Vision Accelerator Design with Intel® Movidius™ VPUs — Supported by the [HDDL Plugin](HDDL.md) @@ -138,7 +138,7 @@ In a perfect pipeline this time should be near zero, which means that the data w **Get the following message when running inference with the VPU plugin: "[VPU] Cannot convert layer due to unsupported layer type "** -This means that your topology has a layer that is unsupported by your target VPU plugin. To resolve this issue, you can implement the custom layer for the target device using the [Inference Engine Extensibility mechanism](../Extensibility_DG/Intro.md). Or, to quickly get a working prototype, you can use the heterogeneous scenario with the default fallback policy (see the [HETERO Plugin](HETERO.md) section). Use the HETERO plugin with a fallback device that supports this layer, for example, CPU: `HETERO:MYRIAD,CPU`. +This means that your topology has a layer that is unsupported by your target VPU plugin. 
To resolve this issue, you can implement the custom layer for the target device using the [OpenVINO™ Extensibility mechanism](../../Extensibility_UG/Intro.md). Or, to quickly get a working prototype, you can use the heterogeneous scenario with the default fallback policy (see the [Heterogeneous execution](../hetero_execution.md) section). Use the HETERO mode with a fallback device that supports this layer, for example, CPU: `HETERO:MYRIAD,CPU`. For a list of VPU-supported layers, see the Supported Layers section of the [Supported Devices](Supported_Devices.md) page. ## Known Layers Limitations diff --git a/docs/OV_Runtime_UG/supported_plugins/config_properties.md b/docs/OV_Runtime_UG/supported_plugins/config_properties.md new file mode 100644 index 00000000000..5b0fef66c20 --- /dev/null +++ b/docs/OV_Runtime_UG/supported_plugins/config_properties.md @@ -0,0 +1,226 @@ +# Query device properties, configuration {#openvino_docs_OV_UG_query_api} + +## Query device properties and devices configuration + +The OpenVINO™ toolkit supports inferencing with several types of devices (processors or accelerators). +This section provides a high-level description of the process of querying different device properties and configuration values at runtime. + +OpenVINO runtime has two types of properties: +- Read-only properties, which provide information about the devices (such as device name, thermal state, execution capabilities, etc.) and information about ov::CompiledModel to understand what configuration values were used to compile the model. +- Mutable properties, which are primarily used to configure the ov::Core::compile_model process and affect final inference on a specific set of devices. Such properties can be set globally per device via ov::Core::set_property or locally for a particular model in ov::Core::compile_model and ov::Core::query_model calls. + +OpenVINO property is represented as a named constexpr variable with a given string name and type (see the example below). 
Example: +``` +static constexpr Property<std::vector<std::string>, PropertyMutability::RO> available_devices{"AVAILABLE_DEVICES"}; +``` +represents a read-only property with C++ name `ov::available_devices`, string name `AVAILABLE_DEVICES` and type `std::vector<std::string>`. + +Refer to the [Hello Query Device С++ Sample](../../../samples/cpp/hello_query_device/README.md) sources and the [Multi-Device execution](../multi_device.md) documentation for examples of setting and getting properties in user applications. + +### Get a set of available devices + +Based on the read-only property `ov::available_devices`, OpenVINO Core collects information about currently available devices enabled by OpenVINO plugins and returns information using the `ov::Core::get_available_devices` method: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_properties_api.cpp + :language: cpp + :fragment: [get_available_devices] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_properties_api.py + :language: python + :fragment: [get_available_devices] + +@endsphinxdirective + +The function returns a list of available devices, for example: + +``` +MYRIAD.1.2-ma2480 +MYRIAD.1.4-ma2480 +CPU +GPU.0 +GPU.1 +``` + +If there is more than one instance of a specific device, the devices are enumerated with `.suffix` where `suffix` is a unique string identifier. Each device name can then be passed to: + +* `ov::Core::compile_model` to load the model to a specific device with specific configuration properties. +* `ov::Core::get_property` to get common or device specific properties. +* All other methods of the `ov::Core` class that accept `deviceName`. + +### Working with properties in Your Code + +The `ov::Core` class provides the following methods to query device information, set or get different device configuration properties: + +* `ov::Core::get_property` - Gets the current value of a specific property. 
+* `ov::Core::set_property` - Sets a new value for the property globally for the specified `device_name`. + +The `ov::CompiledModel` class is also extended to support the properties: + +* `ov::CompiledModel::get_property` +* `ov::CompiledModel::set_property` + +For documentation about OpenVINO common device-independent properties, refer to `openvino/runtime/properties.hpp`. Device specific configuration keys can be found in corresponding device folders (for example, `openvino/runtime/intel_gpu/properties.hpp`). + +### Working with properties via Core + +#### Getting device properties + +The code below demonstrates how to query the `HETERO` device priority of devices which will be used to infer the model: + +@snippet snippets/ov_properties_api.cpp hetero_priorities + +> **NOTE**: All properties have a type, which is specified during property declaration. Based on this, the actual type under `auto` is automatically deduced by the C++ compiler. + +To extract device properties such as available devices (`ov::available_devices`), device name (`ov::device::full_name`), supported properties (`ov::supported_properties`), and others, use the `ov::Core::get_property` method: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_properties_api.cpp + :language: cpp + :fragment: [cpu_device_name] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_properties_api.py + :language: python + :fragment: [cpu_device_name] + +@endsphinxdirective + +A returned value appears as follows: `Intel(R) Core(TM) i7-8700 CPU @ 3.20GHz`. + +> **NOTE**: In order to understand the list of supported properties on `ov::Core` or `ov::CompiledModel` levels, use `ov::supported_properties` which contains a vector of supported property names. Properties that can be changed have `ov::PropertyName::is_mutable` returning `true`. Most of the properties that are changeable on the ov::Core level cannot be changed once the model is compiled, so they become immutable read-only properties. 
+ +#### Configure work with a model + +`ov::Core` methods like: + +* `ov::Core::compile_model` +* `ov::Core::import_model` +* `ov::Core::query_model` + +accept a variadic list of properties as their last arguments. Each property in such parameter lists should be passed as a function call to supply the property value of the specified property type. + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_properties_api.cpp + :language: cpp + :fragment: [compile_model_with_property] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_properties_api.py + :language: python + :fragment: [compile_model_with_property] + +@endsphinxdirective + +The example below specifies hints that a model should be compiled to run inference with multiple inference requests in parallel to achieve the best throughput, while inference should be performed without accuracy loss, with FP32 precision. + +#### Setting properties globally + +`ov::Core::set_property` with a given device name should be used to set global configuration properties which are the same across multiple `ov::Core::compile_model`, `ov::Core::query_model`, etc. calls, while setting a property on a specific `ov::Core::compile_model` call applies properties only to the current call: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_properties_api.cpp + :language: cpp + :fragment: [core_set_property_then_compile] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_properties_api.py + :language: python + :fragment: [core_set_property_then_compile] + +@endsphinxdirective + +### Properties on CompiledModel level + +#### Getting property + +The `ov::CompiledModel::get_property` method is used to get property values the compiled model has been created with or a compiled model level property such as `ov::optimal_number_of_infer_requests`: + +@sphinxdirective + +.. tab:: C++ + + .. 
doxygensnippet:: docs/snippets/ov_properties_api.cpp + :language: cpp + :fragment: [optimal_number_of_infer_requests] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_properties_api.py + :language: python + :fragment: [optimal_number_of_infer_requests] + +@endsphinxdirective + +Or the current temperature of the `MYRIAD` device: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_properties_api.cpp + :language: cpp + :fragment: [device_thermal] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_properties_api.py + :language: python + :fragment: [device_thermal] + +@endsphinxdirective + +Or the number of threads that would be used for inference on `CPU` device: + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_properties_api.cpp + :language: cpp + :fragment: [inference_num_threads] + +.. tab:: Python + + .. doxygensnippet:: docs/snippets/ov_properties_api.py + :language: python + :fragment: [inference_num_threads] + +@endsphinxdirective + +#### Setting properties for compiled model + +The only mode that supports this method is [Multi-Device execution](../multi_device.md): + +@sphinxdirective + +.. tab:: C++ + + .. doxygensnippet:: docs/snippets/ov_properties_api.cpp + :language: cpp + :fragment: [multi_device] + +.. tab:: Python + + .. 
doxygensnippet:: docs/snippets/ov_properties_api.py + :language: python + :fragment: [multi_device] + +@endsphinxdirective diff --git a/docs/OV_Runtime_UG/supported_plugins/files/GNA_Maximum_Input_Tensor_Widths_i16.csv b/docs/OV_Runtime_UG/supported_plugins/files/GNA_Maximum_Input_Tensor_Widths_i16.csv new file mode 100644 index 00000000000..e4f9df38ee0 --- /dev/null +++ b/docs/OV_Runtime_UG/supported_plugins/files/GNA_Maximum_Input_Tensor_Widths_i16.csv @@ -0,0 +1,27976 @@ +KH, KW, SH, SW, PH, PW, SH, SW, H, W (Ci=8/Co=256), W (Ci=16/Co=256), W (Ci=32/Co=256), W (Ci=64/Co=256), W (Ci=128/Co=256), W (Ci=256/Co=256), W (Ci=384/Co=256) +1, 1, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 240, 170 +1, 1, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 128, 85 +1, 1, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240, 170, 85, 56 +1, 2, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 2, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 2, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 2, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 2, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 2, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 3, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 3, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 3, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 3, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 3, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 3, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 3, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 3, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 3, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 4, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 4, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 4, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 4, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 4, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 4, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 4, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 4, 1, 3, 1, 1, 1, 1, 
256, 240, 240, 240, 240,,, +1, 4, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 5, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 5, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 5, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 5, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 5, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 5, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 5, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 5, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 5, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 6, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 6, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 6, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 6, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 6, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 6, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 6, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 6, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 6, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 7, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 7, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 7, 1, 1, 1, 1, 1, 1, 
384, 240, 240, 240,,,, +1, 7, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 7, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 7, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 7, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 7, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 7, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +1, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +1, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +1, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 1, 1, 1, 1, 1, 1, 1, 128, 192, 192, 192, 192, 192, 192, 128 +2, 1, 1, 1, 1, 1, 1, 1, 256, 192, 192, 192, 192, 192, 128, 85 +2, 1, 1, 1, 1, 1, 1, 1, 384, 192, 192, 192, 192, 170, 85, 56 +2, 1, 2, 1, 1, 1, 1, 1, 128, 192, 192, 192, 192, 192, 192, 128 +2, 1, 2, 1, 1, 1, 1, 1, 256, 192, 192, 192, 192, 192, 128, 85 +2, 1, 2, 1, 1, 1, 1, 1, 384, 192, 192, 192, 192, 170, 85, 56 +2, 2, 1, 1, 1, 1, 1, 1, 128, 193, 193, 193, 193,,, +2, 2, 1, 1, 1, 1, 1, 1, 256, 193, 193, 193, 193,,, +2, 2, 1, 1, 1, 1, 1, 1, 384, 193, 193, 193, 193,,, +2, 2, 1, 1, 2, 2, 1, 1, 128, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 1, 1, 256, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 1, 1, 384, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 1, 2, 128, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 1, 2, 256, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 1, 2, 384, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 2, 1, 128, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 2, 1, 256, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 2, 1, 384, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 2, 2, 128, 193, 193, 192, 179,,, +2, 2, 1, 1, 2, 2, 2, 2, 256, 193, 193, 192, 179,,, +2, 2, 
1, 1, 2, 2, 2, 2, 384, 193, 193, 192, 179,,, +2, 2, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 2, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 2, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 2, 1, 2, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 2, 2, 1, 1, 1, 1, 1, 128, 193, 193, 193, 193,,, +2, 2, 2, 1, 1, 1, 1, 1, 256, 193, 193, 193, 193,,, +2, 2, 2, 1, 1, 1, 1, 1, 384, 193, 193, 193, 193,,, +2, 2, 2, 1, 2, 2, 1, 1, 128, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 1, 1, 256, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 1, 1, 384, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 1, 2, 128, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 1, 2, 256, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 1, 2, 384, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 2, 1, 128, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 2, 1, 256, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 2, 1, 384, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 2, 2, 128, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 2, 2, 256, 193, 193, 192, 179,,, +2, 2, 2, 1, 2, 2, 2, 2, 384, 193, 193, 192, 179,,, +2, 2, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 2, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 2, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 1, 2, 256, 
240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 2, 2, 2, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 3, 1, 1, 1, 1, 1, 1, 128, 194, 194, 194, 194,,, +2, 3, 1, 1, 1, 1, 1, 1, 256, 194, 194, 194, 194,,, +2, 3, 1, 1, 1, 1, 1, 1, 384, 194, 194, 194, 194,,, +2, 3, 1, 1, 2, 2, 1, 1, 128, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 1, 1, 256, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 1, 1, 384, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 1, 2, 128, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 1, 2, 256, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 1, 2, 384, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 2, 1, 128, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 2, 1, 256, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 2, 1, 384, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 2, 2, 128, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 2, 2, 256, 194, 194, 186, 167,,, +2, 3, 1, 1, 2, 2, 2, 2, 384, 194, 194, 186, 167,,, +2, 3, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 3, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 3, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 3, 1, 2, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 3, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, 
+2, 3, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 3, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 3, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 3, 2, 1, 1, 1, 1, 1, 128, 194, 194, 194, 194,,, +2, 3, 2, 1, 1, 1, 1, 1, 256, 194, 194, 194, 194,,, +2, 3, 2, 1, 1, 1, 1, 1, 384, 194, 194, 194, 194,,, +2, 3, 2, 1, 2, 2, 1, 1, 128, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 1, 1, 256, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 1, 1, 384, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 1, 2, 128, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 1, 2, 256, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 1, 2, 384, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 2, 1, 128, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 2, 1, 256, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 2, 1, 384, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 2, 2, 128, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 2, 2, 256, 194, 194, 186, 167,,, +2, 3, 2, 1, 2, 2, 2, 2, 384, 194, 194, 186, 167,,, +2, 3, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 3, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 3, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 2, 
1, 128, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 3, 2, 2, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 3, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 3, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 3, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 3, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 4, 1, 1, 1, 1, 1, 1, 128, 195, 195, 195, 195,,, +2, 4, 1, 1, 1, 1, 1, 1, 256, 195, 195, 195, 195,,, +2, 4, 1, 1, 1, 1, 1, 1, 384, 195, 195, 195, 195,,, +2, 4, 1, 1, 2, 2, 1, 1, 128, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 1, 1, 256, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 1, 1, 384, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 1, 2, 128, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 1, 2, 256, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 1, 2, 384, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 2, 1, 128, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 2, 1, 256, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 2, 1, 384, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 2, 2, 128, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 2, 2, 256, 195, 194, 181, 155,,, +2, 4, 1, 1, 2, 2, 2, 2, 384, 195, 194, 181, 155,,, +2, 4, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 
240,,, +2, 4, 1, 2, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 4, 1, 2, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 4, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 4, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 
2, 2, 1, 384, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 4, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 4, 2, 1, 1, 1, 1, 1, 128, 195, 195, 195, 195,,, +2, 4, 2, 1, 1, 1, 1, 1, 256, 195, 195, 195, 195,,, +2, 4, 2, 1, 1, 1, 1, 1, 384, 195, 195, 195, 195,,, +2, 4, 2, 1, 2, 2, 1, 1, 128, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 1, 1, 256, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 1, 1, 384, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 1, 2, 128, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 1, 2, 256, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 1, 2, 384, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 2, 1, 128, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 2, 1, 256, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 2, 1, 384, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 2, 2, 128, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 2, 2, 256, 195, 194, 181, 155,,, +2, 4, 2, 1, 2, 2, 2, 2, 384, 195, 194, 181, 155,,, +2, 4, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 4, 2, 2, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 4, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 1, 1, 256, 240, 240, 
240, 240,,, +2, 4, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 4, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 4, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 5, 1, 1, 1, 1, 1, 1, 128, 196, 196, 196, 132,,, +2, 5, 1, 1, 1, 1, 1, 1, 256, 196, 196, 196, 132,,, +2, 5, 1, 1, 1, 1, 1, 1, 384, 196, 196, 196, 132,,, +2, 5, 1, 1, 2, 2, 1, 1, 128, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 1, 1, 256, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 1, 1, 384, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 1, 2, 128, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 1, 2, 256, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 1, 2, 384, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 2, 1, 128, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 2, 1, 256, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 2, 1, 384, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 2, 2, 128, 196, 192, 176, 132,,, +2, 5, 1, 
1, 2, 2, 2, 2, 256, 196, 192, 176, 132,,, +2, 5, 1, 1, 2, 2, 2, 2, 384, 196, 192, 176, 132,,, +2, 5, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 5, 1, 2, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 5, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 5, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 1, 2, 128, 
240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 5, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 5, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 5, 2, 1, 1, 1, 1, 1, 128, 196, 196, 196, 132,,, +2, 5, 2, 1, 1, 1, 1, 1, 256, 196, 196, 196, 132,,, +2, 5, 2, 1, 1, 1, 1, 1, 384, 196, 196, 196, 132,,, +2, 5, 2, 1, 2, 2, 1, 1, 128, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 1, 1, 256, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 1, 1, 384, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 1, 2, 128, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 1, 2, 256, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 1, 2, 384, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 2, 1, 128, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 2, 1, 256, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 2, 1, 384, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 2, 2, 128, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 2, 2, 256, 196, 192, 176, 132,,, +2, 5, 2, 1, 2, 2, 2, 2, 384, 196, 192, 176, 132,,, 
+2, 5, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 5, 2, 2, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 5, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 5, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 1, 
2, 384, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 5, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +2, 5, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +2, 6, 1, 1, 1, 1, 1, 1, 128, 197, 197, 197,,,, +2, 6, 1, 1, 1, 1, 1, 1, 256, 197, 197, 197,,,, +2, 6, 1, 1, 1, 1, 1, 1, 384, 197, 197, 197,,,, +2, 6, 1, 1, 2, 2, 1, 1, 128, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 1, 1, 256, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 1, 1, 384, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 1, 2, 128, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 1, 2, 256, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 1, 2, 384, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 2, 1, 128, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 2, 1, 256, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 2, 1, 384, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 2, 2, 128, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 2, 2, 256, 197, 189, 170,,,, +2, 6, 1, 1, 2, 2, 2, 2, 384, 197, 189, 170,,,, +2, 6, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 2, 
2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 1, 2, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 6, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 1, 4, 2, 2, 2, 2, 384, 240, 240, 
240,,,, +2, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 1, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 6, 2, 1, 1, 1, 1, 1, 128, 197, 197, 197,,,, +2, 6, 2, 1, 1, 1, 1, 1, 256, 197, 197, 197,,,, +2, 6, 2, 1, 1, 1, 1, 1, 384, 197, 197, 197,,,, +2, 6, 2, 1, 2, 2, 1, 1, 128, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 1, 1, 256, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 1, 1, 384, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 1, 2, 128, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 1, 2, 256, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 1, 2, 384, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 2, 1, 128, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 2, 1, 256, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 2, 
1, 384, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 2, 2, 128, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 2, 2, 256, 197, 189, 170,,,, +2, 6, 2, 1, 2, 2, 2, 2, 384, 197, 189, 170,,,, +2, 6, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 2, 2, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 6, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 
6, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 6, 2, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 1, 1, 1, 1, 1, 1, 128, 198, 198, 198,,,, +2, 7, 1, 1, 1, 1, 1, 1, 256, 198, 198, 198,,,, +2, 7, 1, 1, 1, 1, 1, 1, 384, 198, 198, 198,,,, +2, 7, 1, 1, 2, 2, 1, 1, 128, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 1, 1, 256, 198, 
187, 165,,,, +2, 7, 1, 1, 2, 2, 1, 1, 384, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 1, 2, 128, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 1, 2, 256, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 1, 2, 384, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 2, 1, 128, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 2, 1, 256, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 2, 1, 384, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 2, 2, 128, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 2, 2, 256, 198, 187, 165,,,, +2, 7, 1, 1, 2, 2, 2, 2, 384, 198, 187, 165,,,, +2, 7, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 1, 2, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 4, 1, 1, 
1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, 
+2, 7, 1, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 1, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 1, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 2, 1, 1, 1, 1, 1, 128, 198, 198, 198,,,, +2, 7, 2, 1, 1, 1, 1, 1, 256, 198, 198, 198,,,, +2, 7, 2, 1, 1, 1, 1, 1, 384, 198, 198, 198,,,, +2, 7, 2, 1, 2, 2, 1, 1, 128, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 1, 1, 256, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 1, 1, 384, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 1, 2, 128, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 1, 2, 256, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 1, 2, 384, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 2, 1, 128, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 2, 1, 256, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 2, 1, 384, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 2, 2, 128, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 2, 2, 256, 198, 187, 165,,,, +2, 7, 2, 1, 2, 2, 2, 2, 384, 198, 187, 165,,,, +2, 7, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 2, 1, 128, 
240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 2, 2, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 5, 
2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 2, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +2, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 1, 1, 128, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 1, 1, 384, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 1, 2, 128, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 1, 2, 256, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 1, 2, 384, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +2, 7, 2, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 1, 1, 1, 1, 1, 1, 1, 128, 128, 128, 128, 128, 128, 85, 42 +3, 1, 1, 1, 1, 1, 1, 1, 256, 128, 128, 128, 128, 128, 85, 42 +3, 1, 1, 1, 1, 
1, 1, 1, 384, 128, 128, 128, 128, 128, 85, 42 +3, 1, 2, 1, 1, 1, 1, 1, 128, 128, 128, 128, 128, 128, 85, 42 +3, 1, 2, 1, 1, 1, 1, 1, 256, 128, 128, 128, 128, 128, 85, 42 +3, 1, 2, 1, 1, 1, 1, 1, 384, 128, 128, 128, 128, 128, 85, 42 +3, 1, 3, 1, 1, 1, 1, 1, 128, 128, 128, 128, 128, 128, 85, 42 +3, 1, 3, 1, 1, 1, 1, 1, 256, 128, 128, 128, 128, 128, 85, 42 +3, 1, 3, 1, 1, 1, 1, 1, 384, 128, 128, 128, 128, 128, 85, 42 +3, 2, 1, 1, 1, 1, 1, 1, 128, 129, 129, 129, 129,,, +3, 2, 1, 1, 1, 1, 1, 1, 256, 129, 129, 129, 129,,, +3, 2, 1, 1, 1, 1, 1, 1, 384, 129, 129, 129, 129,,, +3, 2, 1, 1, 2, 2, 1, 1, 128, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 1, 1, 256, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 1, 1, 384, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 1, 2, 128, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 1, 2, 256, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 1, 2, 384, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 2, 1, 128, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 2, 1, 256, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 2, 1, 384, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 2, 2, 128, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 2, 2, 256, 129, 129, 129, 119,,, +3, 2, 1, 1, 2, 2, 2, 2, 384, 129, 129, 129, 119,,, +3, 2, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 2, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 2, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 2, 1, 2, 2, 2, 1, 1, 128, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 1, 1, 256, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 1, 1, 384, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 1, 2, 128, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 1, 2, 256, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 1, 2, 384, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 2, 1, 128, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 2, 1, 256, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 2, 1, 384, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 2, 2, 128, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 2, 2, 256, 240, 240, 240, 237,,, +3, 2, 1, 2, 2, 2, 2, 2, 384, 240, 240, 240, 237,,, +3, 2, 2, 1, 1, 1, 1, 
1, 128, 129, 129, 129, 129,,, +3, 2, 2, 1, 1, 1, 1, 1, 256, 129, 129, 129, 129,,, +3, 2, 2, 1, 1, 1, 1, 1, 384, 129, 129, 129, 129,,, +3, 2, 2, 1, 2, 2, 1, 1, 128, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 1, 1, 256, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 1, 1, 384, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 1, 2, 128, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 1, 2, 256, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 1, 2, 384, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 2, 1, 128, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 2, 1, 256, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 2, 1, 384, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 2, 2, 128, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 2, 2, 256, 129, 129, 129, 119,,, +3, 2, 2, 1, 2, 2, 2, 2, 384, 129, 129, 129, 119,,, +3, 2, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 2, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 2, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 2, 2, 2, 2, 2, 1, 1, 128, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 1, 1, 256, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 1, 1, 384, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 1, 2, 128, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 1, 2, 256, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 1, 2, 384, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 2, 1, 128, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 2, 1, 256, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 2, 1, 384, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 2, 2, 128, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 2, 2, 256, 240, 240, 240, 237,,, +3, 2, 2, 2, 2, 2, 2, 2, 384, 240, 240, 240, 237,,, +3, 2, 3, 1, 1, 1, 1, 1, 128, 129, 129, 129, 129,,, +3, 2, 3, 1, 1, 1, 1, 1, 256, 129, 129, 129, 129,,, +3, 2, 3, 1, 1, 1, 1, 1, 384, 129, 129, 129, 129,,, +3, 2, 3, 1, 2, 2, 1, 1, 128, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 1, 1, 256, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 1, 1, 384, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 1, 2, 128, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 1, 2, 256, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 1, 2, 384, 129, 129, 129, 
119,,, +3, 2, 3, 1, 2, 2, 2, 1, 128, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 2, 1, 256, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 2, 1, 384, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 2, 2, 128, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 2, 2, 256, 129, 129, 129, 119,,, +3, 2, 3, 1, 2, 2, 2, 2, 384, 129, 129, 129, 119,,, +3, 2, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 2, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 2, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 2, 3, 2, 2, 2, 1, 1, 128, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 1, 1, 256, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 1, 1, 384, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 1, 2, 128, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 1, 2, 256, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 1, 2, 384, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 2, 1, 128, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 2, 1, 256, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 2, 1, 384, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 2, 2, 128, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 2, 2, 256, 240, 240, 240, 237,,, +3, 2, 3, 2, 2, 2, 2, 2, 384, 240, 240, 240, 237,,, +3, 3, 1, 1, 1, 1, 1, 1, 128, 130, 130, 130, 87,,, +3, 3, 1, 1, 1, 1, 1, 1, 256, 130, 130, 130, 87,,, +3, 3, 1, 1, 1, 1, 1, 1, 384, 130, 130, 130, 87,,, +3, 3, 1, 1, 2, 2, 1, 1, 128, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 1, 1, 256, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 1, 1, 384, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 1, 2, 128, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 1, 2, 256, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 1, 2, 384, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 2, 1, 128, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 2, 1, 256, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 2, 1, 384, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 2, 2, 128, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 2, 2, 256, 130, 130, 126, 87,,, +3, 3, 1, 1, 2, 2, 2, 2, 384, 130, 130, 126, 87,,, +3, 3, 1, 1, 3, 3, 1, 1, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 1, 1, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 1, 1, 384, 
130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 1, 2, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 1, 2, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 1, 2, 384, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 1, 3, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 1, 3, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 1, 3, 384, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 1, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 1, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 1, 384, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 2, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 2, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 2, 384, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 3, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 3, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 2, 3, 384, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 1, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 1, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 1, 384, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 2, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 2, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 2, 384, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 3, 128, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 3, 256, 130, 128, 118, 87,,, +3, 3, 1, 1, 3, 3, 3, 3, 384, 130, 128, 118, 87,,, +3, 3, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 172,,, +3, 3, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +3, 3, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 1, 1, 128, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 1, 1, 256, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 1, 1, 384, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 1, 2, 128, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 1, 2, 256, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 1, 2, 384, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 2, 1, 128, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 2, 1, 256, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 2, 1, 384, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 2, 2, 128, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 2, 2, 256, 240, 240, 240, 172,,, +3, 3, 1, 2, 2, 2, 2, 2, 
384, 240, 240, 240, 172,,, +3, 3, 1, 2, 3, 3, 1, 1, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 1, 1, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 1, 1, 384, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 1, 2, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 1, 2, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 1, 2, 384, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 1, 3, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 1, 3, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 1, 3, 384, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 1, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 1, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 1, 384, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 2, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 2, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 2, 384, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 3, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 3, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 2, 3, 384, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 1, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 1, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 1, 384, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 2, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 2, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 2, 384, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 3, 128, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 3, 256, 240, 240, 234, 172,,, +3, 3, 1, 2, 3, 3, 3, 3, 384, 240, 240, 234, 172,,, +3, 3, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240, 
240,,, +3, 3, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 3, 1, 3, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 3, 2, 1, 1, 1, 1, 1, 128, 130, 130, 130, 87,,, +3, 3, 2, 1, 1, 1, 1, 1, 256, 130, 130, 130, 87,,, +3, 3, 2, 1, 1, 1, 1, 1, 384, 130, 130, 130, 87,,, +3, 3, 2, 1, 2, 2, 1, 1, 128, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 1, 1, 256, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 1, 1, 384, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 1, 2, 128, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 1, 
2, 256, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 1, 2, 384, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 2, 1, 128, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 2, 1, 256, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 2, 1, 384, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 2, 2, 128, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 2, 2, 256, 130, 130, 126, 87,,, +3, 3, 2, 1, 2, 2, 2, 2, 384, 130, 130, 126, 87,,, +3, 3, 2, 1, 3, 3, 1, 1, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 1, 1, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 1, 1, 384, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 1, 2, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 1, 2, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 1, 2, 384, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 1, 3, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 1, 3, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 1, 3, 384, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 1, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 1, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 1, 384, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 2, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 2, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 2, 384, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 3, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 3, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 2, 3, 384, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 1, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 1, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 1, 384, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 2, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 2, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 2, 384, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 3, 128, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 3, 256, 130, 128, 118, 87,,, +3, 3, 2, 1, 3, 3, 3, 3, 384, 130, 128, 118, 87,,, +3, 3, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 172,,, +3, 3, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +3, 3, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 1, 1, 128, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 1, 1, 
256, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 1, 1, 384, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 1, 2, 128, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 1, 2, 256, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 1, 2, 384, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 2, 1, 128, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 2, 1, 256, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 2, 1, 384, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 2, 2, 128, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 2, 2, 256, 240, 240, 240, 172,,, +3, 3, 2, 2, 2, 2, 2, 2, 384, 240, 240, 240, 172,,, +3, 3, 2, 2, 3, 3, 1, 1, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 1, 1, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 1, 1, 384, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 1, 2, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 1, 2, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 1, 2, 384, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 1, 3, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 1, 3, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 1, 3, 384, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 1, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 1, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 1, 384, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 2, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 2, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 2, 384, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 3, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 3, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 2, 3, 384, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 1, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 1, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 1, 384, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 2, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 2, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 2, 384, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 3, 128, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 3, 256, 240, 240, 234, 172,,, +3, 3, 2, 2, 3, 3, 3, 3, 384, 240, 240, 234, 172,,, +3, 3, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 
240,,, +3, 3, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 
3, 3, 3, 128, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 3, 2, 3, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 3, 3, 1, 1, 1, 1, 1, 128, 130, 130, 130, 87,,, +3, 3, 3, 1, 1, 1, 1, 1, 256, 130, 130, 130, 87,,, +3, 3, 3, 1, 1, 1, 1, 1, 384, 130, 130, 130, 87,,, +3, 3, 3, 1, 2, 2, 1, 1, 128, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 1, 1, 256, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 1, 1, 384, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 1, 2, 128, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 1, 2, 256, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 1, 2, 384, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 2, 1, 128, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 2, 1, 256, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 2, 1, 384, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 2, 2, 128, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 2, 2, 256, 130, 130, 126, 87,,, +3, 3, 3, 1, 2, 2, 2, 2, 384, 130, 130, 126, 87,,, +3, 3, 3, 1, 3, 3, 1, 1, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 1, 1, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 1, 1, 384, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 1, 2, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 1, 2, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 1, 2, 384, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 1, 3, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 1, 3, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 1, 3, 384, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 1, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 1, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 1, 384, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 2, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 2, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 2, 384, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 3, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 3, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 2, 3, 384, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 1, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 1, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 1, 384, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 
2, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 2, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 2, 384, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 3, 128, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 3, 256, 130, 128, 118, 87,,, +3, 3, 3, 1, 3, 3, 3, 3, 384, 130, 128, 118, 87,,, +3, 3, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 172,,, +3, 3, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +3, 3, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 1, 1, 128, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 1, 1, 256, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 1, 1, 384, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 1, 2, 128, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 1, 2, 256, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 1, 2, 384, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 2, 1, 128, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 2, 1, 256, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 2, 1, 384, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 2, 2, 128, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 2, 2, 256, 240, 240, 240, 172,,, +3, 3, 3, 2, 2, 2, 2, 2, 384, 240, 240, 240, 172,,, +3, 3, 3, 2, 3, 3, 1, 1, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 1, 1, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 1, 1, 384, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 1, 2, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 1, 2, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 1, 2, 384, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 1, 3, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 1, 3, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 1, 3, 384, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 1, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 1, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 1, 384, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 2, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 2, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 2, 384, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 3, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 3, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 2, 3, 384, 240, 240, 234, 172,,, 
+3, 3, 3, 2, 3, 3, 3, 1, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 3, 1, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 3, 1, 384, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 3, 2, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 3, 2, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 3, 2, 384, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 3, 3, 128, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 3, 3, 256, 240, 240, 234, 172,,, +3, 3, 3, 2, 3, 3, 3, 3, 384, 240, 240, 234, 172,,, +3, 3, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 
2, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 3, 3, 3, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 4, 1, 1, 1, 1, 1, 1, 128, 131, 131, 131, 88,,, +3, 4, 1, 1, 1, 1, 1, 1, 256, 131, 131, 131, 88,,, +3, 4, 1, 1, 1, 1, 1, 1, 384, 131, 131, 131, 88,,, +3, 4, 1, 1, 2, 2, 1, 1, 128, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 1, 1, 256, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 1, 1, 384, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 1, 2, 128, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 1, 2, 256, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 1, 2, 384, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 2, 1, 128, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 2, 1, 256, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 2, 1, 384, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 2, 2, 128, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 2, 2, 256, 131, 131, 121, 88,,, +3, 4, 1, 1, 2, 2, 2, 2, 384, 131, 131, 121, 88,,, +3, 4, 1, 1, 3, 3, 1, 1, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 1, 1, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 1, 1, 384, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 1, 2, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 1, 2, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 1, 2, 384, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 1, 3, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 1, 3, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 1, 3, 384, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 2, 1, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 2, 1, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 
2, 1, 384, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 2, 2, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 2, 2, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 2, 2, 384, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 2, 3, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 2, 3, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 2, 3, 384, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 1, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 1, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 1, 384, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 2, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 2, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 2, 384, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 3, 128, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 3, 256, 131, 125, 113, 87,,, +3, 4, 1, 1, 3, 3, 3, 3, 384, 131, 125, 113, 87,,, +3, 4, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 173,,, +3, 4, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 173,,, +3, 4, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 173,,, +3, 4, 1, 2, 2, 2, 1, 1, 128, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 1, 1, 256, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 1, 1, 384, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 1, 2, 128, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 1, 2, 256, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 1, 2, 384, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 2, 1, 128, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 2, 1, 256, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 2, 1, 384, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 2, 2, 128, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 2, 2, 256, 240, 240, 239, 173,,, +3, 4, 1, 2, 2, 2, 2, 2, 384, 240, 240, 239, 173,,, +3, 4, 1, 2, 3, 3, 1, 1, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 1, 1, 256, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 1, 1, 384, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 1, 2, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 1, 2, 256, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 1, 2, 384, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 1, 3, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 1, 3, 256, 240, 240, 223, 171,,, +3, 4, 
1, 2, 3, 3, 1, 3, 384, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 1, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 1, 256, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 1, 384, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 2, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 2, 256, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 2, 384, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 3, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 3, 256, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 2, 3, 384, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 1, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 1, 256, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 1, 384, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 2, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 2, 256, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 2, 384, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 3, 128, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 3, 256, 240, 240, 223, 171,,, +3, 4, 1, 2, 3, 3, 3, 3, 384, 240, 240, 223, 171,,, +3, 4, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 2, 256, 
240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 4, 1, 3, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, 
+3, 4, 1, 4, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 4, 1, 4, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 4, 2, 1, 1, 1, 1, 1, 128, 131, 131, 131, 88,,, +3, 4, 2, 1, 1, 1, 1, 1, 256, 131, 131, 131, 88,,, +3, 4, 2, 1, 1, 1, 1, 1, 384, 131, 131, 131, 88,,, +3, 4, 2, 1, 2, 2, 1, 1, 128, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 1, 1, 256, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 1, 1, 384, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 1, 2, 128, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 1, 2, 256, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 1, 2, 384, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 2, 1, 128, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 2, 1, 256, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 2, 1, 384, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 2, 2, 128, 131, 
131, 121, 88,,, +3, 4, 2, 1, 2, 2, 2, 2, 256, 131, 131, 121, 88,,, +3, 4, 2, 1, 2, 2, 2, 2, 384, 131, 131, 121, 88,,, +3, 4, 2, 1, 3, 3, 1, 1, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 1, 1, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 1, 1, 384, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 1, 2, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 1, 2, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 1, 2, 384, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 1, 3, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 1, 3, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 1, 3, 384, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 1, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 1, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 1, 384, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 2, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 2, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 2, 384, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 3, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 3, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 2, 3, 384, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 1, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 1, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 1, 384, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 2, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 2, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 2, 384, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 3, 128, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 3, 256, 131, 125, 113, 87,,, +3, 4, 2, 1, 3, 3, 3, 3, 384, 131, 125, 113, 87,,, +3, 4, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 173,,, +3, 4, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 173,,, +3, 4, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 173,,, +3, 4, 2, 2, 2, 2, 1, 1, 128, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 1, 1, 256, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 1, 1, 384, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 1, 2, 128, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 1, 2, 256, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 1, 2, 384, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 2, 1, 128, 240, 
240, 239, 173,,, +3, 4, 2, 2, 2, 2, 2, 1, 256, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 2, 1, 384, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 2, 2, 128, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 2, 2, 256, 240, 240, 239, 173,,, +3, 4, 2, 2, 2, 2, 2, 2, 384, 240, 240, 239, 173,,, +3, 4, 2, 2, 3, 3, 1, 1, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 1, 1, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 1, 1, 384, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 1, 2, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 1, 2, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 1, 2, 384, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 1, 3, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 1, 3, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 1, 3, 384, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 1, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 1, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 1, 384, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 2, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 2, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 2, 384, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 3, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 3, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 2, 3, 384, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 1, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 1, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 1, 384, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 2, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 2, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 2, 384, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 3, 128, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 3, 256, 240, 240, 223, 171,,, +3, 4, 2, 2, 3, 3, 3, 3, 384, 240, 240, 223, 171,,, +3, 4, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 
2, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 4, 2, 3, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 1, 1, 1, 1, 384, 
240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 4, 2, 4, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, 
+3, 4, 2, 4, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 4, 3, 1, 1, 1, 1, 1, 128, 131, 131, 131, 88,,, +3, 4, 3, 1, 1, 1, 1, 1, 256, 131, 131, 131, 88,,, +3, 4, 3, 1, 1, 1, 1, 1, 384, 131, 131, 131, 88,,, +3, 4, 3, 1, 2, 2, 1, 1, 128, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 1, 1, 256, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 1, 1, 384, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 1, 2, 128, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 1, 2, 256, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 1, 2, 384, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 2, 1, 128, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 2, 1, 256, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 2, 1, 384, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 2, 2, 128, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 2, 2, 256, 131, 131, 121, 88,,, +3, 4, 3, 1, 2, 2, 2, 2, 384, 131, 131, 121, 88,,, +3, 4, 3, 1, 3, 3, 1, 1, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 1, 1, 256, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 1, 1, 384, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 1, 2, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 1, 2, 256, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 1, 2, 384, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 1, 3, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 1, 3, 256, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 1, 3, 384, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 1, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 1, 256, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 1, 384, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 2, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 2, 256, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 2, 384, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 3, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 3, 256, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 2, 3, 384, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 3, 1, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 3, 1, 256, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 3, 1, 384, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 3, 2, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 3, 2, 256, 131, 125, 113, 87,,, +3, 4, 3, 
1, 3, 3, 3, 2, 384, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 3, 3, 128, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 3, 3, 256, 131, 125, 113, 87,,, +3, 4, 3, 1, 3, 3, 3, 3, 384, 131, 125, 113, 87,,, +3, 4, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 173,,, +3, 4, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 173,,, +3, 4, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 173,,, +3, 4, 3, 2, 2, 2, 1, 1, 128, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 1, 1, 256, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 1, 1, 384, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 1, 2, 128, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 1, 2, 256, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 1, 2, 384, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 2, 1, 128, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 2, 1, 256, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 2, 1, 384, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 2, 2, 128, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 2, 2, 256, 240, 240, 239, 173,,, +3, 4, 3, 2, 2, 2, 2, 2, 384, 240, 240, 239, 173,,, +3, 4, 3, 2, 3, 3, 1, 1, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 1, 1, 256, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 1, 1, 384, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 1, 2, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 1, 2, 256, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 1, 2, 384, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 1, 3, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 1, 3, 256, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 1, 3, 384, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 1, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 1, 256, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 1, 384, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 2, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 2, 256, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 2, 384, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 3, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 3, 256, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 2, 3, 384, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 1, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 1, 256, 240, 
240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 1, 384, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 2, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 2, 256, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 2, 384, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 3, 128, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 3, 256, 240, 240, 223, 171,,, +3, 4, 3, 2, 3, 3, 3, 3, 384, 240, 240, 223, 171,,, +3, 4, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 4, 
3, 3, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 4, 3, 3, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 2, 128, 
240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 4, 3, 4, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 5, 1, 1, 1, 1, 1, 1, 128, 132, 132, 132, 89,,, +3, 5, 1, 1, 1, 1, 1, 1, 256, 132, 132, 132, 89,,, +3, 5, 1, 1, 1, 1, 1, 1, 384, 132, 132, 132, 89,,, +3, 5, 1, 1, 2, 2, 1, 1, 128, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 1, 1, 256, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 1, 1, 384, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 1, 2, 128, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 1, 2, 256, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 1, 2, 384, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 2, 1, 128, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 2, 1, 256, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 2, 1, 384, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 2, 2, 128, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 2, 2, 256, 132, 132, 115, 80,,, +3, 5, 1, 1, 2, 2, 2, 2, 384, 132, 132, 115, 80,,, +3, 5, 1, 1, 3, 3, 1, 1, 128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 1, 1, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 1, 1, 384, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 1, 2, 128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 1, 2, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 1, 2, 384, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 1, 3, 128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 1, 3, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 1, 3, 384, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 1, 
128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 1, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 1, 384, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 2, 128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 2, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 2, 384, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 3, 128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 3, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 2, 3, 384, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 1, 128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 1, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 1, 384, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 2, 128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 2, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 2, 384, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 3, 128, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 3, 256, 132, 124, 108, 76,,, +3, 5, 1, 1, 3, 3, 3, 3, 384, 132, 124, 108, 76,,, +3, 5, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 174,,, +3, 5, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 174,,, +3, 5, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 174,,, +3, 5, 1, 2, 2, 2, 1, 1, 128, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 1, 1, 256, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 1, 1, 384, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 1, 2, 128, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 1, 2, 256, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 1, 2, 384, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 2, 1, 128, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 2, 1, 256, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 2, 1, 384, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 2, 2, 128, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 2, 2, 256, 240, 240, 226, 156,,, +3, 5, 1, 2, 2, 2, 2, 2, 384, 240, 240, 226, 156,,, +3, 5, 1, 2, 3, 3, 1, 1, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 1, 1, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 1, 1, 384, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 1, 2, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 1, 2, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 1, 2, 384, 240, 240, 212, 148,,, +3, 5, 1, 2, 
3, 3, 1, 3, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 1, 3, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 1, 3, 384, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 1, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 1, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 1, 384, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 2, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 2, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 2, 384, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 3, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 3, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 2, 3, 384, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 1, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 1, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 1, 384, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 2, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 2, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 2, 384, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 3, 128, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 3, 256, 240, 240, 212, 148,,, +3, 5, 1, 2, 3, 3, 3, 3, 384, 240, 240, 212, 148,,, +3, 5, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240, 232,,, +3, 5, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240, 232,,, +3, 5, 1, 3, 3, 3, 1, 1, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 1, 1, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 1, 1, 384, 240, 
240, 240, 220,,, +3, 5, 1, 3, 3, 3, 1, 2, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 1, 2, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 1, 2, 384, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 1, 3, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 1, 3, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 1, 3, 384, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 1, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 1, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 1, 384, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 2, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 2, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 2, 384, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 3, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 3, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 2, 3, 384, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 1, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 1, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 1, 384, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 2, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 2, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 2, 384, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 3, 128, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 3, 256, 240, 240, 240, 220,,, +3, 5, 1, 3, 3, 3, 3, 3, 384, 240, 240, 240, 220,,, +3, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 
1, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 5, 1, 4, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 2, 1, 256, 
240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 5, 1, 5, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 5, 2, 1, 1, 1, 1, 1, 128, 132, 132, 132, 89,,, +3, 5, 2, 1, 1, 1, 1, 1, 256, 132, 132, 132, 89,,, +3, 5, 2, 1, 1, 1, 1, 1, 384, 132, 132, 132, 89,,, +3, 5, 2, 1, 2, 2, 1, 1, 128, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 1, 1, 256, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 1, 1, 384, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 1, 2, 128, 132, 132, 115, 80,,, +3, 5, 
2, 1, 2, 2, 1, 2, 256, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 1, 2, 384, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 2, 1, 128, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 2, 1, 256, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 2, 1, 384, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 2, 2, 128, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 2, 2, 256, 132, 132, 115, 80,,, +3, 5, 2, 1, 2, 2, 2, 2, 384, 132, 132, 115, 80,,, +3, 5, 2, 1, 3, 3, 1, 1, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 1, 1, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 1, 1, 384, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 1, 2, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 1, 2, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 1, 2, 384, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 1, 3, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 1, 3, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 1, 3, 384, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 1, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 1, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 1, 384, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 2, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 2, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 2, 384, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 3, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 3, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 2, 3, 384, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 1, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 1, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 1, 384, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 2, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 2, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 2, 384, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 3, 128, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 3, 256, 132, 124, 108, 76,,, +3, 5, 2, 1, 3, 3, 3, 3, 384, 132, 124, 108, 76,,, +3, 5, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 174,,, +3, 5, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 174,,, +3, 5, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 174,,, +3, 5, 2, 2, 2, 2, 1, 1, 128, 240, 240, 226, 156,,, +3, 5, 2, 2, 
2, 2, 1, 1, 256, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 1, 1, 384, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 1, 2, 128, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 1, 2, 256, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 1, 2, 384, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 2, 1, 128, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 2, 1, 256, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 2, 1, 384, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 2, 2, 128, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 2, 2, 256, 240, 240, 226, 156,,, +3, 5, 2, 2, 2, 2, 2, 2, 384, 240, 240, 226, 156,,, +3, 5, 2, 2, 3, 3, 1, 1, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 1, 1, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 1, 1, 384, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 1, 2, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 1, 2, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 1, 2, 384, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 1, 3, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 1, 3, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 1, 3, 384, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 1, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 1, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 1, 384, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 2, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 2, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 2, 384, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 3, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 3, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 2, 3, 384, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 1, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 1, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 1, 384, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 2, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 2, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 2, 384, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 3, 128, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 3, 256, 240, 240, 212, 148,,, +3, 5, 2, 2, 3, 3, 3, 3, 384, 240, 240, 212, 148,,, +3, 5, 2, 3, 1, 1, 1, 1, 128, 240, 
240, 240, 240,,, +3, 5, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240, 232,,, +3, 5, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240, 232,,, +3, 5, 2, 3, 3, 3, 1, 1, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 1, 1, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 1, 1, 384, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 1, 2, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 1, 2, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 1, 2, 384, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 1, 3, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 1, 3, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 1, 3, 384, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 1, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 1, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 1, 384, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 2, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 2, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 2, 384, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 3, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 3, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 2, 3, 384, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 3, 1, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 3, 1, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 3, 1, 384, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 3, 2, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 3, 2, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 3, 2, 384, 240, 240, 240, 220,,, +3, 5, 
2, 3, 3, 3, 3, 3, 128, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 3, 3, 256, 240, 240, 240, 220,,, +3, 5, 2, 3, 3, 3, 3, 3, 384, 240, 240, 240, 220,,, +3, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 1, 384, 
240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 5, 2, 4, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, 
+3, 5, 2, 5, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 5, 2, 5, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 5, 3, 1, 1, 1, 1, 1, 128, 132, 132, 132, 89,,, +3, 5, 3, 1, 1, 1, 1, 1, 256, 132, 132, 132, 89,,, +3, 5, 3, 1, 1, 1, 1, 1, 384, 132, 132, 132, 89,,, +3, 5, 3, 1, 2, 2, 1, 1, 128, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 1, 1, 256, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 1, 1, 384, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 1, 2, 128, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 1, 2, 256, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 1, 2, 384, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 2, 1, 128, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 2, 1, 256, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 2, 1, 384, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 2, 2, 128, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 2, 2, 256, 132, 132, 115, 80,,, +3, 5, 3, 1, 2, 2, 2, 2, 384, 132, 132, 115, 80,,, +3, 5, 3, 1, 3, 3, 1, 1, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 1, 1, 256, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 1, 1, 384, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 1, 2, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 1, 2, 256, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 1, 2, 384, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 1, 3, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 1, 3, 256, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 1, 3, 384, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 2, 1, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 2, 1, 256, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 2, 1, 384, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 2, 2, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 2, 2, 256, 132, 124, 108, 76,,, 
+3, 5, 3, 1, 3, 3, 2, 2, 384, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 2, 3, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 2, 3, 256, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 2, 3, 384, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 1, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 1, 256, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 1, 384, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 2, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 2, 256, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 2, 384, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 3, 128, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 3, 256, 132, 124, 108, 76,,, +3, 5, 3, 1, 3, 3, 3, 3, 384, 132, 124, 108, 76,,, +3, 5, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 174,,, +3, 5, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 174,,, +3, 5, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 174,,, +3, 5, 3, 2, 2, 2, 1, 1, 128, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 1, 1, 256, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 1, 1, 384, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 1, 2, 128, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 1, 2, 256, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 1, 2, 384, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 2, 1, 128, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 2, 1, 256, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 2, 1, 384, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 2, 2, 128, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 2, 2, 256, 240, 240, 226, 156,,, +3, 5, 3, 2, 2, 2, 2, 2, 384, 240, 240, 226, 156,,, +3, 5, 3, 2, 3, 3, 1, 1, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 1, 1, 256, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 1, 1, 384, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 1, 2, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 1, 2, 256, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 1, 2, 384, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 1, 3, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 1, 3, 256, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 1, 3, 384, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 1, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 1, 256, 240, 
240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 1, 384, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 2, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 2, 256, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 2, 384, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 3, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 3, 256, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 2, 3, 384, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 1, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 1, 256, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 1, 384, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 2, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 2, 256, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 2, 384, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 3, 128, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 3, 256, 240, 240, 212, 148,,, +3, 5, 3, 2, 3, 3, 3, 3, 384, 240, 240, 212, 148,,, +3, 5, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 3, 2, 2, 1, 1, 128, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 1, 1, 256, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 1, 1, 384, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 1, 2, 128, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 1, 2, 256, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 1, 2, 384, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 2, 1, 128, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 2, 1, 256, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 2, 1, 384, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 2, 2, 128, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 2, 2, 256, 240, 240, 240, 232,,, +3, 5, 3, 3, 2, 2, 2, 2, 384, 240, 240, 240, 232,,, +3, 5, 3, 3, 3, 3, 1, 1, 128, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 1, 1, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 1, 1, 384, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 1, 2, 128, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 1, 2, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 1, 2, 384, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 1, 3, 128, 240, 240, 240, 220,,, +3, 5, 
3, 3, 3, 3, 1, 3, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 1, 3, 384, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 1, 128, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 1, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 1, 384, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 2, 128, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 2, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 2, 384, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 3, 128, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 3, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 2, 3, 384, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 1, 128, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 1, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 1, 384, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 2, 128, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 2, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 2, 384, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 3, 128, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 3, 256, 240, 240, 240, 220,,, +3, 5, 3, 3, 3, 3, 3, 3, 384, 240, 240, 240, 220,,, +3, 5, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 2, 128, 
240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 5, 3, 4, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 2, 2, 2, 2, 384, 240, 240, 240, 240,,, 
+3, 5, 3, 5, 3, 3, 1, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 1, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 1, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 1, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 1, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 1, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 1, 3, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 1, 3, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 1, 3, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 3, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 3, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 2, 3, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 1, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 1, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 1, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 2, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 2, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 2, 384, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 3, 128, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 3, 256, 240, 240, 240, 240,,, +3, 5, 3, 5, 3, 3, 3, 3, 384, 240, 240, 240, 240,,, +3, 6, 1, 1, 1, 1, 1, 1, 128, 133, 133, 90,,,, +3, 6, 1, 1, 1, 1, 1, 1, 256, 133, 133, 90,,,, +3, 6, 1, 1, 1, 1, 1, 1, 384, 133, 133, 90,,,, +3, 6, 1, 1, 2, 2, 1, 1, 128, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 1, 1, 256, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 1, 1, 384, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 1, 2, 128, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 1, 2, 256, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 1, 2, 384, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 2, 1, 128, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 2, 1, 256, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 2, 1, 384, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 2, 2, 128, 
133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 2, 2, 256, 133, 129, 90,,,, +3, 6, 1, 1, 2, 2, 2, 2, 384, 133, 129, 90,,,, +3, 6, 1, 1, 3, 3, 1, 1, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 1, 1, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 1, 1, 384, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 1, 2, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 1, 2, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 1, 2, 384, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 1, 3, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 1, 3, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 1, 3, 384, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 1, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 1, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 1, 384, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 2, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 2, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 2, 384, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 3, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 3, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 2, 3, 384, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 1, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 1, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 1, 384, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 2, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 2, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 2, 384, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 3, 128, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 3, 256, 131, 121, 90,,,, +3, 6, 1, 1, 3, 3, 3, 3, 384, 131, 121, 90,,,, +3, 6, 1, 2, 1, 1, 1, 1, 128, 240, 240, 175,,,, +3, 6, 1, 2, 1, 1, 1, 1, 256, 240, 240, 175,,,, +3, 6, 1, 2, 1, 1, 1, 1, 384, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 1, 1, 128, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 1, 1, 256, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 1, 1, 384, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 1, 2, 128, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 1, 2, 256, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 1, 2, 384, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 2, 1, 128, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 2, 1, 256, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 2, 1, 384, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 2, 2, 128, 240, 240, 
175,,,, +3, 6, 1, 2, 2, 2, 2, 2, 256, 240, 240, 175,,,, +3, 6, 1, 2, 2, 2, 2, 2, 384, 240, 240, 175,,,, +3, 6, 1, 2, 3, 3, 1, 1, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 1, 1, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 1, 1, 384, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 1, 2, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 1, 2, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 1, 2, 384, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 1, 3, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 1, 3, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 1, 3, 384, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 1, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 1, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 1, 384, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 2, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 2, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 2, 384, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 3, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 3, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 2, 3, 384, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 1, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 1, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 1, 384, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 2, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 2, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 2, 384, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 3, 128, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 3, 256, 240, 237, 175,,,, +3, 6, 1, 2, 3, 3, 3, 3, 384, 240, 237, 175,,,, +3, 6, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 2, 
2, 128, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 1, 3, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 
6, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 1, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 2, 1, 384, 240, 
240, 240,,,, +3, 6, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 1, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 
2, 1, 384, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 1, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 1, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 2, 1, 1, 1, 1, 1, 128, 133, 133, 90,,,, +3, 6, 2, 1, 1, 1, 1, 1, 256, 133, 133, 90,,,, +3, 6, 2, 1, 1, 1, 1, 1, 384, 133, 133, 90,,,, +3, 6, 2, 1, 2, 2, 1, 1, 128, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 1, 1, 256, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 1, 1, 384, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 1, 2, 128, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 1, 2, 256, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 1, 2, 384, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 2, 1, 128, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 2, 1, 256, 133, 129, 90,,,, +3, 6, 2, 1, 
2, 2, 2, 1, 384, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 2, 2, 128, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 2, 2, 256, 133, 129, 90,,,, +3, 6, 2, 1, 2, 2, 2, 2, 384, 133, 129, 90,,,, +3, 6, 2, 1, 3, 3, 1, 1, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 1, 1, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 1, 1, 384, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 1, 2, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 1, 2, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 1, 2, 384, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 1, 3, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 1, 3, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 1, 3, 384, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 1, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 1, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 1, 384, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 2, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 2, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 2, 384, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 3, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 3, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 2, 3, 384, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 1, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 1, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 1, 384, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 2, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 2, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 2, 384, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 3, 128, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 3, 256, 131, 121, 90,,,, +3, 6, 2, 1, 3, 3, 3, 3, 384, 131, 121, 90,,,, +3, 6, 2, 2, 1, 1, 1, 1, 128, 240, 240, 175,,,, +3, 6, 2, 2, 1, 1, 1, 1, 256, 240, 240, 175,,,, +3, 6, 2, 2, 1, 1, 1, 1, 384, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 1, 1, 128, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 1, 1, 256, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 1, 1, 384, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 1, 2, 128, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 1, 2, 256, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 1, 2, 384, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 2, 1, 128, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 2, 1, 256, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 2, 1, 
384, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 2, 2, 128, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 2, 2, 256, 240, 240, 175,,,, +3, 6, 2, 2, 2, 2, 2, 2, 384, 240, 240, 175,,,, +3, 6, 2, 2, 3, 3, 1, 1, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 1, 1, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 1, 1, 384, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 1, 2, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 1, 2, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 1, 2, 384, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 1, 3, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 1, 3, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 1, 3, 384, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 1, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 1, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 1, 384, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 2, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 2, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 2, 384, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 3, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 3, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 2, 3, 384, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 1, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 1, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 1, 384, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 2, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 2, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 2, 384, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 3, 128, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 3, 256, 240, 237, 175,,,, +3, 6, 2, 2, 3, 3, 3, 3, 384, 240, 237, 175,,,, +3, 6, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 
2, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 2, 3, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 2, 1, 256, 240, 
240, 240,,,, +3, 6, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 2, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 
2, 1, 256, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 2, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, 
+3, 6, 2, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 2, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 2, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 3, 1, 1, 1, 1, 1, 128, 133, 133, 90,,,, +3, 6, 3, 1, 1, 1, 1, 1, 256, 133, 133, 90,,,, +3, 6, 3, 1, 1, 1, 1, 1, 384, 133, 133, 90,,,, +3, 6, 3, 1, 2, 2, 1, 1, 128, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 1, 1, 256, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 1, 1, 384, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 1, 2, 128, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 1, 2, 256, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 1, 2, 384, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 2, 1, 128, 133, 129, 
90,,,, +3, 6, 3, 1, 2, 2, 2, 1, 256, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 2, 1, 384, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 2, 2, 128, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 2, 2, 256, 133, 129, 90,,,, +3, 6, 3, 1, 2, 2, 2, 2, 384, 133, 129, 90,,,, +3, 6, 3, 1, 3, 3, 1, 1, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 1, 1, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 1, 1, 384, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 1, 2, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 1, 2, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 1, 2, 384, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 1, 3, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 1, 3, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 1, 3, 384, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 1, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 1, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 1, 384, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 2, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 2, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 2, 384, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 3, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 3, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 2, 3, 384, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 1, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 1, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 1, 384, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 2, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 2, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 2, 384, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 3, 128, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 3, 256, 131, 121, 90,,,, +3, 6, 3, 1, 3, 3, 3, 3, 384, 131, 121, 90,,,, +3, 6, 3, 2, 1, 1, 1, 1, 128, 240, 240, 175,,,, +3, 6, 3, 2, 1, 1, 1, 1, 256, 240, 240, 175,,,, +3, 6, 3, 2, 1, 1, 1, 1, 384, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 1, 1, 128, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 1, 1, 256, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 1, 1, 384, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 1, 2, 128, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 1, 2, 256, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 1, 2, 384, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 2, 1, 128, 240, 240, 175,,,, +3, 6, 
3, 2, 2, 2, 2, 1, 256, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 2, 1, 384, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 2, 2, 128, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 2, 2, 256, 240, 240, 175,,,, +3, 6, 3, 2, 2, 2, 2, 2, 384, 240, 240, 175,,,, +3, 6, 3, 2, 3, 3, 1, 1, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 1, 1, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 1, 1, 384, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 1, 2, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 1, 2, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 1, 2, 384, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 1, 3, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 1, 3, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 1, 3, 384, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 1, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 1, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 1, 384, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 2, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 2, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 2, 384, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 3, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 3, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 2, 3, 384, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 1, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 1, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 1, 384, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 2, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 2, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 2, 384, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 3, 128, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 3, 256, 240, 237, 175,,,, +3, 6, 3, 2, 3, 3, 3, 3, 384, 240, 237, 175,,,, +3, 6, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 2, 1, 128, 240, 
240, 240,,,, +3, 6, 3, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 3, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 3, 3, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 
2, 1, 128, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 3, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 3, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, 
+3, 6, 3, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 3, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 3, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 1, 2, 384, 
240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 6, 3, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 6, 3, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 1, 1, 1, 1, 1, 1, 128, 134, 134, 91,,,, +3, 7, 1, 1, 1, 1, 1, 1, 256, 134, 134, 91,,,, +3, 7, 1, 1, 1, 1, 1, 1, 384, 134, 134, 91,,,, +3, 7, 1, 1, 2, 2, 1, 1, 128, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 1, 1, 256, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 1, 1, 384, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 1, 2, 128, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 1, 2, 256, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 1, 
2, 384, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 2, 1, 128, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 2, 1, 256, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 2, 1, 384, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 2, 2, 128, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 2, 2, 256, 134, 127, 91,,,, +3, 7, 1, 1, 2, 2, 2, 2, 384, 134, 127, 91,,,, +3, 7, 1, 1, 3, 3, 1, 1, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 1, 1, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 1, 1, 384, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 1, 2, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 1, 2, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 1, 2, 384, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 1, 3, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 1, 3, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 1, 3, 384, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 1, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 1, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 1, 384, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 2, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 2, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 2, 384, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 3, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 3, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 2, 3, 384, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 1, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 1, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 1, 384, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 2, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 2, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 2, 384, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 3, 128, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 3, 256, 130, 119, 91,,,, +3, 7, 1, 1, 3, 3, 3, 3, 384, 130, 119, 91,,,, +3, 7, 1, 2, 1, 1, 1, 1, 128, 240, 240, 176,,,, +3, 7, 1, 2, 1, 1, 1, 1, 256, 240, 240, 176,,,, +3, 7, 1, 2, 1, 1, 1, 1, 384, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 1, 1, 128, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 1, 1, 256, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 1, 1, 384, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 1, 2, 128, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 1, 2, 256, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 1, 2, 384, 240, 240, 
176,,,, +3, 7, 1, 2, 2, 2, 2, 1, 128, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 2, 1, 256, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 2, 1, 384, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 2, 2, 128, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 2, 2, 256, 240, 240, 176,,,, +3, 7, 1, 2, 2, 2, 2, 2, 384, 240, 240, 176,,,, +3, 7, 1, 2, 3, 3, 1, 1, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 1, 1, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 1, 1, 384, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 1, 2, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 1, 2, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 1, 2, 384, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 1, 3, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 1, 3, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 1, 3, 384, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 1, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 1, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 1, 384, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 2, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 2, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 2, 384, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 3, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 3, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 2, 3, 384, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 1, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 1, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 1, 384, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 2, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 2, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 2, 384, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 3, 128, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 3, 256, 240, 232, 176,,,, +3, 7, 1, 2, 3, 3, 3, 3, 384, 240, 232, 176,,,, +3, 7, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 1, 
2, 384, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 1, 3, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 
7, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 1, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 1, 2, 256, 240, 
240, 240,,,, +3, 7, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 1, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 
1, 2, 256, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 1, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 1, 2, 128, 240, 240, 240,,,, 
+3, 7, 1, 7, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 1, 7, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 2, 1, 1, 1, 1, 1, 128, 134, 134, 91,,,, +3, 7, 2, 1, 1, 1, 1, 1, 256, 134, 134, 91,,,, +3, 7, 2, 1, 1, 1, 1, 1, 384, 134, 134, 91,,,, +3, 7, 2, 1, 2, 2, 1, 1, 128, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 1, 1, 256, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 1, 1, 384, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 1, 2, 128, 134, 
127, 91,,,, +3, 7, 2, 1, 2, 2, 1, 2, 256, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 1, 2, 384, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 2, 1, 128, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 2, 1, 256, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 2, 1, 384, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 2, 2, 128, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 2, 2, 256, 134, 127, 91,,,, +3, 7, 2, 1, 2, 2, 2, 2, 384, 134, 127, 91,,,, +3, 7, 2, 1, 3, 3, 1, 1, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 1, 1, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 1, 1, 384, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 1, 2, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 1, 2, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 1, 2, 384, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 1, 3, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 1, 3, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 1, 3, 384, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 1, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 1, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 1, 384, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 2, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 2, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 2, 384, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 3, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 3, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 2, 3, 384, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 1, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 1, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 1, 384, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 2, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 2, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 2, 384, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 3, 128, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 3, 256, 130, 119, 91,,,, +3, 7, 2, 1, 3, 3, 3, 3, 384, 130, 119, 91,,,, +3, 7, 2, 2, 1, 1, 1, 1, 128, 240, 240, 176,,,, +3, 7, 2, 2, 1, 1, 1, 1, 256, 240, 240, 176,,,, +3, 7, 2, 2, 1, 1, 1, 1, 384, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 1, 1, 128, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 1, 1, 256, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 1, 1, 384, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 1, 2, 128, 240, 240, 176,,,, +3, 7, 
2, 2, 2, 2, 1, 2, 256, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 1, 2, 384, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 2, 1, 128, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 2, 1, 256, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 2, 1, 384, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 2, 2, 128, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 2, 2, 256, 240, 240, 176,,,, +3, 7, 2, 2, 2, 2, 2, 2, 384, 240, 240, 176,,,, +3, 7, 2, 2, 3, 3, 1, 1, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 1, 1, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 1, 1, 384, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 1, 2, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 1, 2, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 1, 2, 384, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 1, 3, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 1, 3, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 1, 3, 384, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 1, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 1, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 1, 384, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 2, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 2, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 2, 384, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 3, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 3, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 2, 3, 384, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 1, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 1, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 1, 384, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 2, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 2, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 2, 384, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 3, 128, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 3, 256, 240, 232, 176,,,, +3, 7, 2, 2, 3, 3, 3, 3, 384, 240, 232, 176,,,, +3, 7, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 1, 2, 128, 240, 
240, 240,,,, +3, 7, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 2, 3, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 
1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 2, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, 
+3, 7, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 2, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 1, 1, 384, 
240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 2, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 7, 
2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 2, 7, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 3, 1, 1, 1, 1, 1, 128, 134, 134, 91,,,, +3, 7, 3, 1, 1, 1, 1, 1, 256, 134, 134, 91,,,, +3, 7, 3, 1, 1, 1, 1, 1, 384, 134, 134, 91,,,, +3, 7, 3, 1, 2, 2, 1, 1, 128, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 1, 1, 256, 134, 127, 91,,,, 
+3, 7, 3, 1, 2, 2, 1, 1, 384, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 1, 2, 128, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 1, 2, 256, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 1, 2, 384, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 2, 1, 128, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 2, 1, 256, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 2, 1, 384, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 2, 2, 128, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 2, 2, 256, 134, 127, 91,,,, +3, 7, 3, 1, 2, 2, 2, 2, 384, 134, 127, 91,,,, +3, 7, 3, 1, 3, 3, 1, 1, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 1, 1, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 1, 1, 384, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 1, 2, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 1, 2, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 1, 2, 384, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 1, 3, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 1, 3, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 1, 3, 384, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 1, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 1, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 1, 384, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 2, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 2, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 2, 384, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 3, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 3, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 2, 3, 384, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 1, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 1, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 1, 384, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 2, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 2, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 2, 384, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 3, 128, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 3, 256, 130, 119, 91,,,, +3, 7, 3, 1, 3, 3, 3, 3, 384, 130, 119, 91,,,, +3, 7, 3, 2, 1, 1, 1, 1, 128, 240, 240, 176,,,, +3, 7, 3, 2, 1, 1, 1, 1, 256, 240, 240, 176,,,, +3, 7, 3, 2, 1, 1, 1, 1, 384, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 1, 1, 128, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 1, 1, 256, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 
1, 1, 384, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 1, 2, 128, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 1, 2, 256, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 1, 2, 384, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 2, 1, 128, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 2, 1, 256, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 2, 1, 384, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 2, 2, 128, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 2, 2, 256, 240, 240, 176,,,, +3, 7, 3, 2, 2, 2, 2, 2, 384, 240, 240, 176,,,, +3, 7, 3, 2, 3, 3, 1, 1, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 1, 1, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 1, 1, 384, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 1, 2, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 1, 2, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 1, 2, 384, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 1, 3, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 1, 3, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 1, 3, 384, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 1, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 1, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 1, 384, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 2, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 2, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 2, 384, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 3, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 3, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 2, 3, 384, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 1, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 1, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 1, 384, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 2, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 2, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 2, 384, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 3, 128, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 3, 256, 240, 232, 176,,,, +3, 7, 3, 2, 3, 3, 3, 3, 384, 240, 232, 176,,,, +3, 7, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 1, 1, 256, 240, 240, 240,,,, 
+3, 7, 3, 3, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 3, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 3, 3, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 1, 1, 256, 
240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 3, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 5, 
2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 3, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 1, 1, 128, 240, 240, 
240,,,, +3, 7, 3, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 3, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +3, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 1, 
1, 128, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 1, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 1, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 1, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 2, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 2, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 2, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 3, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 3, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 1, 3, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 1, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 1, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 1, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 2, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 2, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 2, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 3, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 3, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 2, 3, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 1, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 1, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 1, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 2, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 2, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 2, 384, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 3, 128, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 3, 256, 240, 240, 240,,,, +3, 7, 3, 7, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 1, 1, 1, 1, 1, 1, 1, 128, 96, 96, 96, 96, 96, 64, 32 +4, 1, 1, 1, 1, 1, 1, 1, 256, 96, 96, 96, 96, 96, 64, 32 +4, 1, 1, 1, 1, 1, 1, 1, 384, 96, 
96, 96, 96, 96, 64, 32 +4, 1, 2, 1, 1, 1, 1, 1, 128, 96, 96, 96, 96, 96, 64, 32 +4, 1, 2, 1, 1, 1, 1, 1, 256, 96, 96, 96, 96, 96, 64, 32 +4, 1, 2, 1, 1, 1, 1, 1, 384, 96, 96, 96, 96, 96, 64, 32 +4, 1, 3, 1, 1, 1, 1, 1, 128, 96, 96, 96, 96, 96, 64, 32 +4, 1, 3, 1, 1, 1, 1, 1, 256, 96, 96, 96, 96, 96, 64, 32 +4, 1, 3, 1, 1, 1, 1, 1, 384, 96, 96, 96, 96, 96, 64, 32 +4, 1, 4, 1, 1, 1, 1, 1, 128, 96, 96, 96, 96, 96, 64, 32 +4, 1, 4, 1, 1, 1, 1, 1, 256, 96, 96, 96, 96, 96, 64, 32 +4, 1, 4, 1, 1, 1, 1, 1, 384, 96, 96, 96, 96, 96, 64, 32 +4, 2, 1, 1, 1, 1, 1, 1, 128, 97, 97, 97, 97,,, +4, 2, 1, 1, 1, 1, 1, 1, 256, 97, 97, 97, 97,,, +4, 2, 1, 1, 1, 1, 1, 1, 384, 97, 97, 97, 97,,, +4, 2, 1, 1, 2, 2, 1, 1, 128, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 1, 1, 256, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 1, 1, 384, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 1, 2, 128, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 1, 2, 256, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 1, 2, 384, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 2, 1, 128, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 2, 1, 256, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 2, 1, 384, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 2, 2, 128, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 2, 2, 256, 97, 97, 97, 85,,, +4, 2, 1, 1, 2, 2, 2, 2, 384, 97, 97, 97, 85,,, +4, 2, 1, 2, 1, 1, 1, 1, 128, 193, 193, 193, 193,,, +4, 2, 1, 2, 1, 1, 1, 1, 256, 193, 193, 193, 193,,, +4, 2, 1, 2, 1, 1, 1, 1, 384, 193, 193, 193, 193,,, +4, 2, 1, 2, 2, 2, 1, 1, 128, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 1, 1, 256, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 1, 1, 384, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 1, 2, 128, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 1, 2, 256, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 1, 2, 384, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 2, 1, 128, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 2, 1, 256, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 2, 1, 384, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 2, 2, 128, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 2, 2, 2, 256, 193, 193, 193, 169,,, +4, 2, 1, 2, 2, 
2, 2, 2, 384, 193, 193, 193, 169,,, +4, 2, 2, 1, 1, 1, 1, 1, 128, 97, 97, 97, 97,,, +4, 2, 2, 1, 1, 1, 1, 1, 256, 97, 97, 97, 97,,, +4, 2, 2, 1, 1, 1, 1, 1, 384, 97, 97, 97, 97,,, +4, 2, 2, 1, 2, 2, 1, 1, 128, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 1, 1, 256, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 1, 1, 384, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 1, 2, 128, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 1, 2, 256, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 1, 2, 384, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 2, 1, 128, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 2, 1, 256, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 2, 1, 384, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 2, 2, 128, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 2, 2, 256, 97, 97, 97, 85,,, +4, 2, 2, 1, 2, 2, 2, 2, 384, 97, 97, 97, 85,,, +4, 2, 2, 2, 1, 1, 1, 1, 128, 193, 193, 193, 193,,, +4, 2, 2, 2, 1, 1, 1, 1, 256, 193, 193, 193, 193,,, +4, 2, 2, 2, 1, 1, 1, 1, 384, 193, 193, 193, 193,,, +4, 2, 2, 2, 2, 2, 1, 1, 128, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 1, 1, 256, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 1, 1, 384, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 1, 2, 128, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 1, 2, 256, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 1, 2, 384, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 2, 1, 128, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 2, 1, 256, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 2, 1, 384, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 2, 2, 128, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 2, 2, 256, 193, 193, 193, 169,,, +4, 2, 2, 2, 2, 2, 2, 2, 384, 193, 193, 193, 169,,, +4, 2, 3, 1, 1, 1, 1, 1, 128, 97, 97, 97, 97,,, +4, 2, 3, 1, 1, 1, 1, 1, 256, 97, 97, 97, 97,,, +4, 2, 3, 1, 1, 1, 1, 1, 384, 97, 97, 97, 97,,, +4, 2, 3, 1, 2, 2, 1, 1, 128, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 1, 1, 256, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 1, 1, 384, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 1, 2, 128, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 1, 2, 256, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 1, 2, 384, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 2, 1, 128, 
97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 2, 1, 256, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 2, 1, 384, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 2, 2, 128, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 2, 2, 256, 97, 97, 97, 85,,, +4, 2, 3, 1, 2, 2, 2, 2, 384, 97, 97, 97, 85,,, +4, 2, 3, 2, 1, 1, 1, 1, 128, 193, 193, 193, 193,,, +4, 2, 3, 2, 1, 1, 1, 1, 256, 193, 193, 193, 193,,, +4, 2, 3, 2, 1, 1, 1, 1, 384, 193, 193, 193, 193,,, +4, 2, 3, 2, 2, 2, 1, 1, 128, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 1, 1, 256, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 1, 1, 384, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 1, 2, 128, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 1, 2, 256, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 1, 2, 384, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 2, 1, 128, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 2, 1, 256, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 2, 1, 384, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 2, 2, 128, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 2, 2, 256, 193, 193, 193, 169,,, +4, 2, 3, 2, 2, 2, 2, 2, 384, 193, 193, 193, 169,,, +4, 2, 4, 1, 1, 1, 1, 1, 128, 97, 97, 97, 97,,, +4, 2, 4, 1, 1, 1, 1, 1, 256, 97, 97, 97, 97,,, +4, 2, 4, 1, 1, 1, 1, 1, 384, 97, 97, 97, 97,,, +4, 2, 4, 1, 2, 2, 1, 1, 128, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 1, 1, 256, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 1, 1, 384, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 1, 2, 128, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 1, 2, 256, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 1, 2, 384, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 2, 1, 128, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 2, 1, 256, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 2, 1, 384, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 2, 2, 128, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 2, 2, 256, 97, 97, 97, 85,,, +4, 2, 4, 1, 2, 2, 2, 2, 384, 97, 97, 97, 85,,, +4, 2, 4, 2, 1, 1, 1, 1, 128, 193, 193, 193, 193,,, +4, 2, 4, 2, 1, 1, 1, 1, 256, 193, 193, 193, 193,,, +4, 2, 4, 2, 1, 1, 1, 1, 384, 193, 193, 193, 193,,, +4, 2, 4, 2, 2, 2, 1, 1, 128, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 1, 1, 256, 
193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 1, 1, 384, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 1, 2, 128, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 1, 2, 256, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 1, 2, 384, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 2, 1, 128, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 2, 1, 256, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 2, 1, 384, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 2, 2, 128, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 2, 2, 256, 193, 193, 193, 169,,, +4, 2, 4, 2, 2, 2, 2, 2, 384, 193, 193, 193, 169,,, +4, 3, 1, 1, 1, 1, 1, 1, 128, 98, 98, 98, 66,,, +4, 3, 1, 1, 1, 1, 1, 1, 256, 98, 98, 98, 66,,, +4, 3, 1, 1, 1, 1, 1, 1, 384, 98, 98, 98, 66,,, +4, 3, 1, 1, 2, 2, 1, 1, 128, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 1, 1, 256, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 1, 1, 384, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 1, 2, 128, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 1, 2, 256, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 1, 2, 384, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 2, 1, 128, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 2, 1, 256, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 2, 1, 384, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 2, 2, 128, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 2, 2, 256, 98, 98, 94, 66,,, +4, 3, 1, 1, 2, 2, 2, 2, 384, 98, 98, 94, 66,,, +4, 3, 1, 1, 3, 3, 1, 1, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 1, 1, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 1, 1, 384, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 1, 2, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 1, 2, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 1, 2, 384, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 1, 3, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 1, 3, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 1, 3, 384, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 2, 1, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 2, 1, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 2, 1, 384, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 2, 2, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 2, 2, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 2, 2, 384, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 
3, 2, 3, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 2, 3, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 2, 3, 384, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 1, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 1, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 1, 384, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 2, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 2, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 2, 384, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 3, 128, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 3, 256, 98, 98, 89, 66,,, +4, 3, 1, 1, 3, 3, 3, 3, 384, 98, 98, 89, 66,,, +4, 3, 1, 2, 1, 1, 1, 1, 128, 194, 194, 194, 130,,, +4, 3, 1, 2, 1, 1, 1, 1, 256, 194, 194, 194, 130,,, +4, 3, 1, 2, 1, 1, 1, 1, 384, 194, 194, 194, 130,,, +4, 3, 1, 2, 2, 2, 1, 1, 128, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 1, 1, 256, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 1, 1, 384, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 1, 2, 128, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 1, 2, 256, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 1, 2, 384, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 2, 1, 128, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 2, 1, 256, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 2, 1, 384, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 2, 2, 128, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 2, 2, 256, 194, 194, 186, 130,,, +4, 3, 1, 2, 2, 2, 2, 2, 384, 194, 194, 186, 130,,, +4, 3, 1, 2, 3, 3, 1, 1, 128, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 1, 1, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 1, 1, 384, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 1, 2, 128, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 1, 2, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 1, 2, 384, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 1, 3, 128, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 1, 3, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 1, 3, 384, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 1, 128, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 1, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 1, 384, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 2, 128, 194, 
194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 2, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 2, 384, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 3, 128, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 3, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 2, 3, 384, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 1, 128, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 1, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 1, 384, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 2, 128, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 2, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 2, 384, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 3, 128, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 3, 256, 194, 194, 176, 130,,, +4, 3, 1, 2, 3, 3, 3, 3, 384, 194, 194, 176, 130,,, +4, 3, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 2, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 2, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 2, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 3, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 3, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 1, 3, 384, 240, 240, 240, 194,,, +4, 3, 
1, 3, 3, 3, 2, 1, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 2, 1, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 2, 1, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 2, 2, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 2, 2, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 2, 2, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 2, 3, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 2, 3, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 2, 3, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 1, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 1, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 1, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 2, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 2, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 2, 384, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 3, 128, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 3, 256, 240, 240, 240, 194,,, +4, 3, 1, 3, 3, 3, 3, 3, 384, 240, 240, 240, 194,,, +4, 3, 2, 1, 1, 1, 1, 1, 128, 98, 98, 98, 66,,, +4, 3, 2, 1, 1, 1, 1, 1, 256, 98, 98, 98, 66,,, +4, 3, 2, 1, 1, 1, 1, 1, 384, 98, 98, 98, 66,,, +4, 3, 2, 1, 2, 2, 1, 1, 128, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 1, 1, 256, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 1, 1, 384, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 1, 2, 128, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 1, 2, 256, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 1, 2, 384, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 2, 1, 128, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 2, 1, 256, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 2, 1, 384, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 2, 2, 128, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 2, 2, 256, 98, 98, 94, 66,,, +4, 3, 2, 1, 2, 2, 2, 2, 384, 98, 98, 94, 66,,, +4, 3, 2, 1, 3, 3, 1, 1, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 1, 1, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 1, 1, 384, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 1, 2, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 1, 2, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 1, 2, 384, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 1, 3, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 
3, 3, 1, 3, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 1, 3, 384, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 1, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 1, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 1, 384, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 2, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 2, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 2, 384, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 3, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 3, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 2, 3, 384, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 1, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 1, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 1, 384, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 2, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 2, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 2, 384, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 3, 128, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 3, 256, 98, 98, 89, 66,,, +4, 3, 2, 1, 3, 3, 3, 3, 384, 98, 98, 89, 66,,, +4, 3, 2, 2, 1, 1, 1, 1, 128, 194, 194, 194, 130,,, +4, 3, 2, 2, 1, 1, 1, 1, 256, 194, 194, 194, 130,,, +4, 3, 2, 2, 1, 1, 1, 1, 384, 194, 194, 194, 130,,, +4, 3, 2, 2, 2, 2, 1, 1, 128, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 1, 1, 256, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 1, 1, 384, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 1, 2, 128, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 1, 2, 256, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 1, 2, 384, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 2, 1, 128, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 2, 1, 256, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 2, 1, 384, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 2, 2, 128, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 2, 2, 256, 194, 194, 186, 130,,, +4, 3, 2, 2, 2, 2, 2, 2, 384, 194, 194, 186, 130,,, +4, 3, 2, 2, 3, 3, 1, 1, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 1, 1, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 1, 1, 384, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 1, 2, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 1, 2, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 
3, 3, 1, 2, 384, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 1, 3, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 1, 3, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 1, 3, 384, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 1, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 1, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 1, 384, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 2, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 2, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 2, 384, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 3, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 3, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 2, 3, 384, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 1, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 1, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 1, 384, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 2, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 2, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 2, 384, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 3, 128, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 3, 256, 194, 194, 176, 130,,, +4, 3, 2, 2, 3, 3, 3, 3, 384, 194, 194, 176, 130,,, +4, 3, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 1, 256, 240, 
240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 2, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 2, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 2, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 3, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 3, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 1, 3, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 1, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 1, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 1, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 2, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 2, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 2, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 3, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 3, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 2, 3, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 1, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 1, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 1, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 2, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 2, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 2, 384, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 3, 128, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 3, 256, 240, 240, 240, 194,,, +4, 3, 2, 3, 3, 3, 3, 3, 384, 240, 240, 240, 194,,, +4, 3, 3, 1, 1, 1, 1, 1, 128, 98, 98, 98, 66,,, +4, 3, 3, 1, 1, 1, 1, 1, 256, 98, 98, 98, 66,,, +4, 3, 3, 1, 1, 1, 1, 1, 384, 98, 98, 98, 66,,, +4, 3, 3, 1, 2, 2, 1, 1, 128, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 1, 1, 256, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 1, 1, 384, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 1, 2, 128, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 1, 2, 256, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 1, 2, 384, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 2, 1, 128, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 2, 1, 256, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 2, 1, 384, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 2, 2, 128, 98, 98, 94, 66,,, +4, 3, 3, 1, 2, 2, 2, 2, 256, 98, 98, 94, 66,,, +4, 3, 3, 
1, 2, 2, 2, 2, 384, 98, 98, 94, 66,,, +4, 3, 3, 1, 3, 3, 1, 1, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 1, 1, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 1, 1, 384, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 1, 2, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 1, 2, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 1, 2, 384, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 1, 3, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 1, 3, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 1, 3, 384, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 1, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 1, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 1, 384, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 2, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 2, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 2, 384, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 3, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 3, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 2, 3, 384, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 1, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 1, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 1, 384, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 2, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 2, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 2, 384, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 3, 128, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 3, 256, 98, 98, 89, 66,,, +4, 3, 3, 1, 3, 3, 3, 3, 384, 98, 98, 89, 66,,, +4, 3, 3, 2, 1, 1, 1, 1, 128, 194, 194, 194, 130,,, +4, 3, 3, 2, 1, 1, 1, 1, 256, 194, 194, 194, 130,,, +4, 3, 3, 2, 1, 1, 1, 1, 384, 194, 194, 194, 130,,, +4, 3, 3, 2, 2, 2, 1, 1, 128, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 1, 1, 256, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 1, 1, 384, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 1, 2, 128, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 1, 2, 256, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 1, 2, 384, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 2, 1, 128, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 2, 1, 256, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 2, 1, 384, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 2, 2, 128, 194, 194, 
186, 130,,, +4, 3, 3, 2, 2, 2, 2, 2, 256, 194, 194, 186, 130,,, +4, 3, 3, 2, 2, 2, 2, 2, 384, 194, 194, 186, 130,,, +4, 3, 3, 2, 3, 3, 1, 1, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 1, 1, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 1, 1, 384, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 1, 2, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 1, 2, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 1, 2, 384, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 1, 3, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 1, 3, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 1, 3, 384, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 1, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 1, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 1, 384, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 2, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 2, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 2, 384, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 3, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 3, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 2, 3, 384, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 1, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 1, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 1, 384, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 2, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 2, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 2, 384, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 3, 128, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 3, 256, 194, 194, 176, 130,,, +4, 3, 3, 2, 3, 3, 3, 3, 384, 194, 194, 176, 130,,, +4, 3, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 1, 2, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 1, 2, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 1, 2, 384, 240, 240, 240, 194,,, +4, 3, 3, 
3, 2, 2, 2, 1, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 2, 1, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 2, 1, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 2, 2, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 2, 2, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 2, 2, 2, 2, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 2, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 2, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 2, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 3, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 3, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 1, 3, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 1, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 1, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 1, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 2, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 2, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 2, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 3, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 3, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 2, 3, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 1, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 1, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 1, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 2, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 2, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 2, 384, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 3, 128, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 3, 256, 240, 240, 240, 194,,, +4, 3, 3, 3, 3, 3, 3, 3, 384, 240, 240, 240, 194,,, +4, 3, 4, 1, 1, 1, 1, 1, 128, 98, 98, 98, 66,,, +4, 3, 4, 1, 1, 1, 1, 1, 256, 98, 98, 98, 66,,, +4, 3, 4, 1, 1, 1, 1, 1, 384, 98, 98, 98, 66,,, +4, 3, 4, 1, 2, 2, 1, 1, 128, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 1, 1, 256, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 1, 1, 384, 98, 98, 94, 66,,, +4, 
3, 4, 1, 2, 2, 1, 2, 128, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 1, 2, 256, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 1, 2, 384, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 2, 1, 128, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 2, 1, 256, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 2, 1, 384, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 2, 2, 128, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 2, 2, 256, 98, 98, 94, 66,,, +4, 3, 4, 1, 2, 2, 2, 2, 384, 98, 98, 94, 66,,, +4, 3, 4, 1, 3, 3, 1, 1, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 1, 1, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 1, 1, 384, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 1, 2, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 1, 2, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 1, 2, 384, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 1, 3, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 1, 3, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 1, 3, 384, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 1, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 1, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 1, 384, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 2, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 2, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 2, 384, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 3, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 3, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 2, 3, 384, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 1, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 1, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 1, 384, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 2, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 2, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 2, 384, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 3, 128, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 3, 256, 98, 98, 89, 66,,, +4, 3, 4, 1, 3, 3, 3, 3, 384, 98, 98, 89, 66,,, +4, 3, 4, 2, 1, 1, 1, 1, 128, 194, 194, 194, 130,,, +4, 3, 4, 2, 1, 1, 1, 1, 256, 194, 194, 194, 130,,, +4, 3, 4, 2, 1, 1, 1, 1, 384, 194, 194, 194, 130,,, +4, 3, 4, 2, 2, 2, 1, 1, 128, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 1, 1, 256, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 
2, 1, 1, 384, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 1, 2, 128, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 1, 2, 256, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 1, 2, 384, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 2, 1, 128, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 2, 1, 256, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 2, 1, 384, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 2, 2, 128, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 2, 2, 256, 194, 194, 186, 130,,, +4, 3, 4, 2, 2, 2, 2, 2, 384, 194, 194, 186, 130,,, +4, 3, 4, 2, 3, 3, 1, 1, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 1, 1, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 1, 1, 384, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 1, 2, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 1, 2, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 1, 2, 384, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 1, 3, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 1, 3, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 1, 3, 384, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 1, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 1, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 1, 384, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 2, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 2, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 2, 384, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 3, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 3, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 2, 3, 384, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 1, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 1, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 1, 384, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 2, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 2, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 2, 384, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 3, 128, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 3, 256, 194, 194, 176, 130,,, +4, 3, 4, 2, 3, 3, 3, 3, 384, 194, 194, 176, 130,,, +4, 3, 4, 3, 1, 1, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 1, 1, 1, 1, 256, 240, 240, 
240, 194,,, +4, 3, 4, 3, 1, 1, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 1, 2, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 1, 2, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 1, 2, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 2, 1, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 2, 1, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 2, 1, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 2, 2, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 2, 2, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 2, 2, 2, 2, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 1, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 1, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 1, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 2, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 2, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 2, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 3, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 3, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 1, 3, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 1, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 1, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 1, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 2, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 2, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 2, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 3, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 3, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 2, 3, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 3, 1, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 3, 1, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 3, 1, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 3, 2, 128, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 3, 2, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 3, 2, 384, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 3, 3, 128, 240, 240, 240, 194,,, +4, 3, 4, 
3, 3, 3, 3, 3, 256, 240, 240, 240, 194,,, +4, 3, 4, 3, 3, 3, 3, 3, 384, 240, 240, 240, 194,,, +4, 4, 1, 1, 1, 1, 1, 1, 128, 99, 99, 99, 67,,, +4, 4, 1, 1, 1, 1, 1, 1, 256, 99, 99, 99, 67,,, +4, 4, 1, 1, 1, 1, 1, 1, 384, 99, 99, 99, 67,,, +4, 4, 1, 1, 2, 2, 1, 1, 128, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 1, 1, 256, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 1, 1, 384, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 1, 2, 128, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 1, 2, 256, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 1, 2, 384, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 2, 1, 128, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 2, 1, 256, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 2, 1, 384, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 2, 2, 128, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 2, 2, 256, 99, 99, 87, 59,,, +4, 4, 1, 1, 2, 2, 2, 2, 384, 99, 99, 87, 59,,, +4, 4, 1, 1, 3, 3, 1, 1, 128, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 1, 1, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 1, 1, 384, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 1, 2, 128, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 1, 2, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 1, 2, 384, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 1, 3, 128, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 1, 3, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 1, 3, 384, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 1, 128, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 1, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 1, 384, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 2, 128, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 2, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 2, 384, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 3, 128, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 3, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 2, 3, 384, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 1, 128, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 1, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 1, 384, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 2, 128, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 2, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 2, 384, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 3, 128, 99, 
96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 3, 256, 99, 96, 83, 56,,, +4, 4, 1, 1, 3, 3, 3, 3, 384, 99, 96, 83, 56,,, +4, 4, 1, 2, 1, 1, 1, 1, 128, 195, 195, 195, 131,,, +4, 4, 1, 2, 1, 1, 1, 1, 256, 195, 195, 195, 131,,, +4, 4, 1, 2, 1, 1, 1, 1, 384, 195, 195, 195, 131,,, +4, 4, 1, 2, 2, 2, 1, 1, 128, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 1, 1, 256, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 1, 1, 384, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 1, 2, 128, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 1, 2, 256, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 1, 2, 384, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 2, 1, 128, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 2, 1, 256, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 2, 1, 384, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 2, 2, 128, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 2, 2, 256, 195, 195, 171, 115,,, +4, 4, 1, 2, 2, 2, 2, 2, 384, 195, 195, 171, 115,,, +4, 4, 1, 2, 3, 3, 1, 1, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 1, 1, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 1, 1, 384, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 1, 2, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 1, 2, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 1, 2, 384, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 1, 3, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 1, 3, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 1, 3, 384, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 1, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 1, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 1, 384, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 2, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 2, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 2, 384, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 3, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 3, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 2, 3, 384, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 3, 1, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 3, 1, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 3, 1, 384, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 
3, 3, 2, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 3, 2, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 3, 2, 384, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 3, 3, 128, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 3, 3, 256, 195, 189, 163, 109,,, +4, 4, 1, 2, 3, 3, 3, 3, 384, 195, 189, 163, 109,,, +4, 4, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 195,,, +4, 4, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 195,,, +4, 4, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 195,,, +4, 4, 1, 3, 2, 2, 1, 1, 128, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 1, 1, 256, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 1, 1, 384, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 1, 2, 128, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 1, 2, 256, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 1, 2, 384, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 2, 1, 128, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 2, 1, 256, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 2, 1, 384, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 2, 2, 128, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 2, 2, 256, 240, 240, 240, 171,,, +4, 4, 1, 3, 2, 2, 2, 2, 384, 240, 240, 240, 171,,, +4, 4, 1, 3, 3, 3, 1, 1, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 1, 1, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 1, 1, 384, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 1, 2, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 1, 2, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 1, 2, 384, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 1, 3, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 1, 3, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 1, 3, 384, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 1, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 1, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 1, 384, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 2, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 2, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 2, 384, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 3, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 3, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 2, 3, 384, 240, 240, 
240, 162,,, +4, 4, 1, 3, 3, 3, 3, 1, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 3, 1, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 3, 1, 384, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 3, 2, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 3, 2, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 3, 2, 384, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 3, 3, 128, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 3, 3, 256, 240, 240, 240, 162,,, +4, 4, 1, 3, 3, 3, 3, 3, 384, 240, 240, 240, 162,,, +4, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 4, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240, 227,,, +4, 4, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240, 227,,, +4, 4, 1, 4, 3, 3, 1, 1, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 1, 1, 256, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 1, 1, 384, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 1, 2, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 1, 2, 256, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 1, 2, 384, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 1, 3, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 1, 3, 256, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 1, 3, 384, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 2, 1, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 2, 1, 256, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 2, 1, 384, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 2, 2, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 2, 2, 256, 240, 240, 240, 215,,, +4, 4, 1, 
4, 3, 3, 2, 2, 384, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 2, 3, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 2, 3, 256, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 2, 3, 384, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 1, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 1, 256, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 1, 384, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 2, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 2, 256, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 2, 384, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 3, 128, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 3, 256, 240, 240, 240, 215,,, +4, 4, 1, 4, 3, 3, 3, 3, 384, 240, 240, 240, 215,,, +4, 4, 2, 1, 1, 1, 1, 1, 128, 99, 99, 99, 67,,, +4, 4, 2, 1, 1, 1, 1, 1, 256, 99, 99, 99, 67,,, +4, 4, 2, 1, 1, 1, 1, 1, 384, 99, 99, 99, 67,,, +4, 4, 2, 1, 2, 2, 1, 1, 128, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 1, 1, 256, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 1, 1, 384, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 1, 2, 128, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 1, 2, 256, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 1, 2, 384, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 2, 1, 128, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 2, 1, 256, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 2, 1, 384, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 2, 2, 128, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 2, 2, 256, 99, 99, 87, 59,,, +4, 4, 2, 1, 2, 2, 2, 2, 384, 99, 99, 87, 59,,, +4, 4, 2, 1, 3, 3, 1, 1, 128, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 1, 1, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 1, 1, 384, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 1, 2, 128, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 1, 2, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 1, 2, 384, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 1, 3, 128, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 1, 3, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 1, 3, 384, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 2, 1, 128, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 2, 1, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 2, 1, 384, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 2, 2, 128, 99, 96, 
83, 56,,, +4, 4, 2, 1, 3, 3, 2, 2, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 2, 2, 384, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 2, 3, 128, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 2, 3, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 2, 3, 384, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 1, 128, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 1, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 1, 384, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 2, 128, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 2, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 2, 384, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 3, 128, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 3, 256, 99, 96, 83, 56,,, +4, 4, 2, 1, 3, 3, 3, 3, 384, 99, 96, 83, 56,,, +4, 4, 2, 2, 1, 1, 1, 1, 128, 195, 195, 195, 131,,, +4, 4, 2, 2, 1, 1, 1, 1, 256, 195, 195, 195, 131,,, +4, 4, 2, 2, 1, 1, 1, 1, 384, 195, 195, 195, 131,,, +4, 4, 2, 2, 2, 2, 1, 1, 128, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 1, 1, 256, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 1, 1, 384, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 1, 2, 128, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 1, 2, 256, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 1, 2, 384, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 2, 1, 128, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 2, 1, 256, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 2, 1, 384, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 2, 2, 128, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 2, 2, 256, 195, 195, 171, 115,,, +4, 4, 2, 2, 2, 2, 2, 2, 384, 195, 195, 171, 115,,, +4, 4, 2, 2, 3, 3, 1, 1, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 1, 1, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 1, 1, 384, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 1, 2, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 1, 2, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 1, 2, 384, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 1, 3, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 1, 3, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 1, 3, 384, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 2, 1, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 
3, 2, 1, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 2, 1, 384, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 2, 2, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 2, 2, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 2, 2, 384, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 2, 3, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 2, 3, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 2, 3, 384, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 1, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 1, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 1, 384, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 2, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 2, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 2, 384, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 3, 128, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 3, 256, 195, 189, 163, 109,,, +4, 4, 2, 2, 3, 3, 3, 3, 384, 195, 189, 163, 109,,, +4, 4, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 195,,, +4, 4, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 195,,, +4, 4, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 195,,, +4, 4, 2, 3, 2, 2, 1, 1, 128, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 1, 1, 256, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 1, 1, 384, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 1, 2, 128, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 1, 2, 256, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 1, 2, 384, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 2, 1, 128, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 2, 1, 256, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 2, 1, 384, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 2, 2, 128, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 2, 2, 256, 240, 240, 240, 171,,, +4, 4, 2, 3, 2, 2, 2, 2, 384, 240, 240, 240, 171,,, +4, 4, 2, 3, 3, 3, 1, 1, 128, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 1, 1, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 1, 1, 384, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 1, 2, 128, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 1, 2, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 1, 2, 384, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 1, 3, 128, 240, 240, 
240, 162,,, +4, 4, 2, 3, 3, 3, 1, 3, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 1, 3, 384, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 1, 128, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 1, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 1, 384, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 2, 128, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 2, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 2, 384, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 3, 128, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 3, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 2, 3, 384, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 1, 128, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 1, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 1, 384, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 2, 128, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 2, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 2, 384, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 3, 128, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 3, 256, 240, 240, 240, 162,,, +4, 4, 2, 3, 3, 3, 3, 3, 384, 240, 240, 240, 162,,, +4, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 4, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240, 227,,, +4, 4, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240, 227,,, +4, 4, 2, 4, 3, 3, 1, 1, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 1, 1, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 1, 1, 384, 240, 240, 240, 215,,, +4, 4, 2, 
4, 3, 3, 1, 2, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 1, 2, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 1, 2, 384, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 1, 3, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 1, 3, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 1, 3, 384, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 1, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 1, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 1, 384, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 2, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 2, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 2, 384, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 3, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 3, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 2, 3, 384, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 1, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 1, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 1, 384, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 2, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 2, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 2, 384, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 3, 128, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 3, 256, 240, 240, 240, 215,,, +4, 4, 2, 4, 3, 3, 3, 3, 384, 240, 240, 240, 215,,, +4, 4, 3, 1, 1, 1, 1, 1, 128, 99, 99, 99, 67,,, +4, 4, 3, 1, 1, 1, 1, 1, 256, 99, 99, 99, 67,,, +4, 4, 3, 1, 1, 1, 1, 1, 384, 99, 99, 99, 67,,, +4, 4, 3, 1, 2, 2, 1, 1, 128, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 1, 1, 256, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 1, 1, 384, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 1, 2, 128, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 1, 2, 256, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 1, 2, 384, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 2, 1, 128, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 2, 1, 256, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 2, 1, 384, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 2, 2, 128, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 2, 2, 256, 99, 99, 87, 59,,, +4, 4, 3, 1, 2, 2, 2, 2, 384, 99, 99, 87, 59,,, +4, 4, 3, 1, 3, 3, 1, 1, 128, 99, 96, 83, 
56,,, +4, 4, 3, 1, 3, 3, 1, 1, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 1, 1, 384, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 1, 2, 128, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 1, 2, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 1, 2, 384, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 1, 3, 128, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 1, 3, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 1, 3, 384, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 1, 128, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 1, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 1, 384, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 2, 128, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 2, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 2, 384, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 3, 128, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 3, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 2, 3, 384, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 1, 128, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 1, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 1, 384, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 2, 128, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 2, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 2, 384, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 3, 128, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 3, 256, 99, 96, 83, 56,,, +4, 4, 3, 1, 3, 3, 3, 3, 384, 99, 96, 83, 56,,, +4, 4, 3, 2, 1, 1, 1, 1, 128, 195, 195, 195, 131,,, +4, 4, 3, 2, 1, 1, 1, 1, 256, 195, 195, 195, 131,,, +4, 4, 3, 2, 1, 1, 1, 1, 384, 195, 195, 195, 131,,, +4, 4, 3, 2, 2, 2, 1, 1, 128, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 1, 1, 256, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 1, 1, 384, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 1, 2, 128, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 1, 2, 256, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 1, 2, 384, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 2, 1, 128, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 2, 1, 256, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 2, 1, 384, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 2, 2, 128, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 2, 2, 2, 256, 195, 195, 171, 115,,, +4, 4, 3, 2, 2, 
2, 2, 2, 384, 195, 195, 171, 115,,, +4, 4, 3, 2, 3, 3, 1, 1, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 1, 1, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 1, 1, 384, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 1, 2, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 1, 2, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 1, 2, 384, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 1, 3, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 1, 3, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 1, 3, 384, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 1, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 1, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 1, 384, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 2, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 2, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 2, 384, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 3, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 3, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 2, 3, 384, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 1, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 1, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 1, 384, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 2, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 2, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 2, 384, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 3, 128, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 3, 256, 195, 189, 163, 109,,, +4, 4, 3, 2, 3, 3, 3, 3, 384, 195, 189, 163, 109,,, +4, 4, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 195,,, +4, 4, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 195,,, +4, 4, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 195,,, +4, 4, 3, 3, 2, 2, 1, 1, 128, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 1, 1, 256, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 1, 1, 384, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 1, 2, 128, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 1, 2, 256, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 1, 2, 384, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 2, 1, 128, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 2, 1, 256, 240, 240, 
240, 171,,, +4, 4, 3, 3, 2, 2, 2, 1, 384, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 2, 2, 128, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 2, 2, 256, 240, 240, 240, 171,,, +4, 4, 3, 3, 2, 2, 2, 2, 384, 240, 240, 240, 171,,, +4, 4, 3, 3, 3, 3, 1, 1, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 1, 1, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 1, 1, 384, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 1, 2, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 1, 2, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 1, 2, 384, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 1, 3, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 1, 3, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 1, 3, 384, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 1, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 1, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 1, 384, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 2, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 2, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 2, 384, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 3, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 3, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 2, 3, 384, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 1, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 1, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 1, 384, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 2, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 2, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 2, 384, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 3, 128, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 3, 256, 240, 240, 240, 162,,, +4, 4, 3, 3, 3, 3, 3, 3, 384, 240, 240, 240, 162,,, +4, 4, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 4, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 4, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 4, 3, 4, 2, 2, 1, 1, 128, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 1, 1, 256, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 1, 1, 384, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 1, 2, 128, 240, 240, 240, 227,,, +4, 4, 3, 
4, 2, 2, 1, 2, 256, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 1, 2, 384, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 2, 1, 128, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 2, 1, 256, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 2, 1, 384, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 2, 2, 128, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 2, 2, 256, 240, 240, 240, 227,,, +4, 4, 3, 4, 2, 2, 2, 2, 384, 240, 240, 240, 227,,, +4, 4, 3, 4, 3, 3, 1, 1, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 1, 1, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 1, 1, 384, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 1, 2, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 1, 2, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 1, 2, 384, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 1, 3, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 1, 3, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 1, 3, 384, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 1, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 1, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 1, 384, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 2, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 2, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 2, 384, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 3, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 3, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 2, 3, 384, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 1, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 1, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 1, 384, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 2, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 2, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 2, 384, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 3, 128, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 3, 256, 240, 240, 240, 215,,, +4, 4, 3, 4, 3, 3, 3, 3, 384, 240, 240, 240, 215,,, +4, 4, 4, 1, 1, 1, 1, 1, 128, 99, 99, 99, 67,,, +4, 4, 4, 1, 1, 1, 1, 1, 256, 99, 99, 99, 67,,, +4, 4, 4, 1, 1, 1, 1, 1, 384, 99, 99, 99, 67,,, +4, 4, 4, 1, 2, 2, 1, 1, 128, 99, 99, 87, 
59,,, +4, 4, 4, 1, 2, 2, 1, 1, 256, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 1, 1, 384, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 1, 2, 128, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 1, 2, 256, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 1, 2, 384, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 2, 1, 128, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 2, 1, 256, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 2, 1, 384, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 2, 2, 128, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 2, 2, 256, 99, 99, 87, 59,,, +4, 4, 4, 1, 2, 2, 2, 2, 384, 99, 99, 87, 59,,, +4, 4, 4, 1, 3, 3, 1, 1, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 1, 1, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 1, 1, 384, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 1, 2, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 1, 2, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 1, 2, 384, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 1, 3, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 1, 3, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 1, 3, 384, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 1, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 1, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 1, 384, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 2, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 2, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 2, 384, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 3, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 3, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 2, 3, 384, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 1, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 1, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 1, 384, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 2, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 2, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 2, 384, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 3, 128, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 3, 256, 99, 96, 83, 56,,, +4, 4, 4, 1, 3, 3, 3, 3, 384, 99, 96, 83, 56,,, +4, 4, 4, 2, 1, 1, 1, 1, 128, 195, 195, 195, 131,,, +4, 4, 4, 2, 1, 1, 1, 1, 256, 195, 195, 195, 131,,, +4, 4, 4, 2, 1, 1, 1, 1, 384, 195, 195, 195, 131,,, +4, 4, 4, 2, 
2, 2, 1, 1, 128, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 1, 1, 256, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 1, 1, 384, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 1, 2, 128, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 1, 2, 256, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 1, 2, 384, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 2, 1, 128, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 2, 1, 256, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 2, 1, 384, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 2, 2, 128, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 2, 2, 256, 195, 195, 171, 115,,, +4, 4, 4, 2, 2, 2, 2, 2, 384, 195, 195, 171, 115,,, +4, 4, 4, 2, 3, 3, 1, 1, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 1, 1, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 1, 1, 384, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 1, 2, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 1, 2, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 1, 2, 384, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 1, 3, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 1, 3, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 1, 3, 384, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 1, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 1, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 1, 384, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 2, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 2, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 2, 384, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 3, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 3, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 2, 3, 384, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 1, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 1, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 1, 384, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 2, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 2, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 2, 384, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 3, 128, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 3, 256, 195, 189, 163, 109,,, +4, 4, 4, 2, 3, 3, 3, 3, 384, 195, 
189, 163, 109,,, +4, 4, 4, 3, 1, 1, 1, 1, 128, 240, 240, 240, 195,,, +4, 4, 4, 3, 1, 1, 1, 1, 256, 240, 240, 240, 195,,, +4, 4, 4, 3, 1, 1, 1, 1, 384, 240, 240, 240, 195,,, +4, 4, 4, 3, 2, 2, 1, 1, 128, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 1, 1, 256, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 1, 1, 384, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 1, 2, 128, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 1, 2, 256, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 1, 2, 384, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 2, 1, 128, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 2, 1, 256, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 2, 1, 384, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 2, 2, 128, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 2, 2, 256, 240, 240, 240, 171,,, +4, 4, 4, 3, 2, 2, 2, 2, 384, 240, 240, 240, 171,,, +4, 4, 4, 3, 3, 3, 1, 1, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 1, 1, 256, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 1, 1, 384, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 1, 2, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 1, 2, 256, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 1, 2, 384, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 1, 3, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 1, 3, 256, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 1, 3, 384, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 1, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 1, 256, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 1, 384, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 2, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 2, 256, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 2, 384, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 3, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 3, 256, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 2, 3, 384, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 3, 1, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 3, 1, 256, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 3, 1, 384, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 3, 2, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 3, 2, 256, 240, 240, 240, 162,,, +4, 4, 
4, 3, 3, 3, 3, 2, 384, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 3, 3, 128, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 3, 3, 256, 240, 240, 240, 162,,, +4, 4, 4, 3, 3, 3, 3, 3, 384, 240, 240, 240, 162,,, +4, 4, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 4, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 4, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 4, 4, 4, 2, 2, 1, 1, 128, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 1, 1, 256, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 1, 1, 384, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 1, 2, 128, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 1, 2, 256, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 1, 2, 384, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 2, 1, 128, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 2, 1, 256, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 2, 1, 384, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 2, 2, 128, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 2, 2, 256, 240, 240, 240, 227,,, +4, 4, 4, 4, 2, 2, 2, 2, 384, 240, 240, 240, 227,,, +4, 4, 4, 4, 3, 3, 1, 1, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 1, 1, 256, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 1, 1, 384, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 1, 2, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 1, 2, 256, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 1, 2, 384, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 1, 3, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 1, 3, 256, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 1, 3, 384, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 1, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 1, 256, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 1, 384, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 2, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 2, 256, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 2, 384, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 3, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 3, 256, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 2, 3, 384, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 1, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 1, 256, 
240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 1, 384, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 2, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 2, 256, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 2, 384, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 3, 128, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 3, 256, 240, 240, 240, 215,,, +4, 4, 4, 4, 3, 3, 3, 3, 384, 240, 240, 240, 215,,, +4, 5, 1, 1, 1, 1, 1, 1, 128, 100, 100, 68, 36,,, +4, 5, 1, 1, 1, 1, 1, 1, 256, 100, 100, 68, 36,,, +4, 5, 1, 1, 1, 1, 1, 1, 384, 100, 100, 68, 36,,, +4, 5, 1, 1, 2, 2, 1, 1, 128, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 1, 1, 256, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 1, 1, 384, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 1, 2, 128, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 1, 2, 256, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 1, 2, 384, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 2, 1, 128, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 2, 1, 256, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 2, 1, 384, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 2, 2, 128, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 2, 2, 256, 100, 99, 68, 36,,, +4, 5, 1, 1, 2, 2, 2, 2, 384, 100, 99, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 1, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 1, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 1, 384, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 2, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 2, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 2, 384, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 3, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 3, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 1, 3, 384, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 1, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 1, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 1, 384, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 2, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 2, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 2, 384, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 3, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 3, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 2, 3, 384, 100, 94, 68, 
36,,, +4, 5, 1, 1, 3, 3, 3, 1, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 3, 1, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 3, 1, 384, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 3, 2, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 3, 2, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 3, 2, 384, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 3, 3, 128, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 3, 3, 256, 100, 94, 68, 36,,, +4, 5, 1, 1, 3, 3, 3, 3, 384, 100, 94, 68, 36,,, +4, 5, 1, 2, 1, 1, 1, 1, 128, 196, 196, 132, 68,,, +4, 5, 1, 2, 1, 1, 1, 1, 256, 196, 196, 132, 68,,, +4, 5, 1, 2, 1, 1, 1, 1, 384, 196, 196, 132, 68,,, +4, 5, 1, 2, 2, 2, 1, 1, 128, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 1, 1, 256, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 1, 1, 384, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 1, 2, 128, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 1, 2, 256, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 1, 2, 384, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 2, 1, 128, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 2, 1, 256, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 2, 1, 384, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 2, 2, 128, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 2, 2, 256, 196, 194, 132, 68,,, +4, 5, 1, 2, 2, 2, 2, 2, 384, 196, 194, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 1, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 1, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 1, 384, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 2, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 2, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 2, 384, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 3, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 3, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 1, 3, 384, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 1, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 1, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 1, 384, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 2, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 2, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 2, 384, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 
3, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 3, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 2, 3, 384, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 1, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 1, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 1, 384, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 2, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 2, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 2, 384, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 3, 128, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 3, 256, 196, 184, 132, 68,,, +4, 5, 1, 2, 3, 3, 3, 3, 384, 196, 184, 132, 68,,, +4, 5, 1, 3, 1, 1, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 1, 1, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 1, 1, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 1, 2, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 1, 2, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 1, 2, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 2, 1, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 2, 1, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 2, 1, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 2, 2, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 2, 2, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 2, 2, 2, 2, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 2, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 2, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 2, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 3, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 3, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 1, 3, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 2, 1, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 2, 1, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 2, 1, 384, 240, 240, 196, 100,,, +4, 
5, 1, 3, 3, 3, 2, 2, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 2, 2, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 2, 2, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 2, 3, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 2, 3, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 2, 3, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 1, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 1, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 1, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 2, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 2, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 2, 384, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 3, 128, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 3, 256, 240, 240, 196, 100,,, +4, 5, 1, 3, 3, 3, 3, 3, 384, 240, 240, 196, 100,,, +4, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 2, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 2, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 2, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 3, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 3, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 1, 3, 
384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 1, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 1, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 1, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 2, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 2, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 2, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 3, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 3, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 2, 3, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 1, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 1, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 1, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 2, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 2, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 2, 384, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 3, 128, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 3, 256, 240, 240, 240, 132,,, +4, 5, 1, 4, 3, 3, 3, 3, 384, 240, 240, 240, 132,,, +4, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 1, 2, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 1, 2, 256, 240, 240, 240, 
164,,, +4, 5, 1, 5, 3, 3, 1, 2, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 1, 3, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 1, 3, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 1, 3, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 1, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 1, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 1, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 2, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 2, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 2, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 3, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 3, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 2, 3, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 1, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 1, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 1, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 2, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 2, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 2, 384, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 3, 128, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 3, 256, 240, 240, 240, 164,,, +4, 5, 1, 5, 3, 3, 3, 3, 384, 240, 240, 240, 164,,, +4, 5, 2, 1, 1, 1, 1, 1, 128, 100, 100, 68, 36,,, +4, 5, 2, 1, 1, 1, 1, 1, 256, 100, 100, 68, 36,,, +4, 5, 2, 1, 1, 1, 1, 1, 384, 100, 100, 68, 36,,, +4, 5, 2, 1, 2, 2, 1, 1, 128, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 1, 1, 256, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 1, 1, 384, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 1, 2, 128, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 1, 2, 256, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 1, 2, 384, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 2, 1, 128, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 2, 1, 256, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 2, 1, 384, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 2, 2, 128, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 2, 2, 256, 100, 99, 68, 36,,, +4, 5, 2, 1, 2, 2, 2, 2, 384, 100, 99, 68, 36,,, +4, 5, 2, 1, 3, 3, 1, 1, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 1, 1, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 
3, 3, 1, 1, 384, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 1, 2, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 1, 2, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 1, 2, 384, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 1, 3, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 1, 3, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 1, 3, 384, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 1, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 1, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 1, 384, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 2, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 2, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 2, 384, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 3, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 3, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 2, 3, 384, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 1, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 1, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 1, 384, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 2, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 2, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 2, 384, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 3, 128, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 3, 256, 100, 94, 68, 36,,, +4, 5, 2, 1, 3, 3, 3, 3, 384, 100, 94, 68, 36,,, +4, 5, 2, 2, 1, 1, 1, 1, 128, 196, 196, 132, 68,,, +4, 5, 2, 2, 1, 1, 1, 1, 256, 196, 196, 132, 68,,, +4, 5, 2, 2, 1, 1, 1, 1, 384, 196, 196, 132, 68,,, +4, 5, 2, 2, 2, 2, 1, 1, 128, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 1, 1, 256, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 1, 1, 384, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 1, 2, 128, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 1, 2, 256, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 1, 2, 384, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 2, 1, 128, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 2, 1, 256, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 2, 1, 384, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 2, 2, 128, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 2, 2, 256, 196, 194, 132, 68,,, +4, 5, 2, 2, 2, 2, 2, 2, 384, 196, 194, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 
1, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 1, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 1, 384, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 2, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 2, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 2, 384, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 3, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 3, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 1, 3, 384, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 1, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 1, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 1, 384, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 2, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 2, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 2, 384, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 3, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 3, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 2, 3, 384, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 1, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 1, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 1, 384, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 2, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 2, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 2, 384, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 3, 128, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 3, 256, 196, 184, 132, 68,,, +4, 5, 2, 2, 3, 3, 3, 3, 384, 196, 184, 132, 68,,, +4, 5, 2, 3, 1, 1, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 1, 1, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 1, 1, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 1, 2, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 1, 2, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 1, 2, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 2, 1, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 2, 1, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 2, 1, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 
2, 2, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 2, 2, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 2, 2, 2, 2, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 2, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 2, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 2, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 3, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 3, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 1, 3, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 1, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 1, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 1, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 2, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 2, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 2, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 3, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 3, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 2, 3, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 1, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 1, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 1, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 2, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 2, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 2, 384, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 3, 128, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 3, 256, 240, 240, 196, 100,,, +4, 5, 2, 3, 3, 3, 3, 3, 384, 240, 240, 196, 100,,, +4, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 1, 2, 384, 240, 240, 
240, 132,,, +4, 5, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 2, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 2, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 2, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 3, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 3, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 1, 3, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 1, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 1, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 1, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 2, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 2, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 2, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 3, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 3, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 2, 3, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 1, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 1, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 1, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 2, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 2, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 2, 384, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 3, 128, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 3, 256, 240, 240, 240, 132,,, +4, 5, 2, 4, 3, 3, 3, 3, 384, 240, 240, 240, 132,,, +4, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 2, 
5, 2, 2, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 2, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 2, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 2, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 3, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 3, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 1, 3, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 1, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 1, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 1, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 2, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 2, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 2, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 3, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 3, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 2, 3, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 1, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 1, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 1, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 2, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 2, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 2, 384, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 3, 128, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 3, 256, 240, 240, 240, 164,,, +4, 5, 2, 5, 3, 3, 3, 3, 384, 240, 240, 240, 164,,, +4, 5, 3, 1, 1, 1, 1, 1, 128, 100, 100, 68, 36,,, +4, 5, 3, 1, 1, 1, 1, 1, 256, 100, 
100, 68, 36,,, +4, 5, 3, 1, 1, 1, 1, 1, 384, 100, 100, 68, 36,,, +4, 5, 3, 1, 2, 2, 1, 1, 128, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 1, 1, 256, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 1, 1, 384, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 1, 2, 128, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 1, 2, 256, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 1, 2, 384, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 2, 1, 128, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 2, 1, 256, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 2, 1, 384, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 2, 2, 128, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 2, 2, 256, 100, 99, 68, 36,,, +4, 5, 3, 1, 2, 2, 2, 2, 384, 100, 99, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 1, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 1, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 1, 384, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 2, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 2, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 2, 384, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 3, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 3, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 1, 3, 384, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 1, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 1, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 1, 384, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 2, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 2, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 2, 384, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 3, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 3, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 2, 3, 384, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 1, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 1, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 1, 384, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 2, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 2, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 2, 384, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 3, 128, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 3, 256, 100, 94, 68, 36,,, +4, 5, 3, 1, 3, 3, 3, 3, 384, 100, 94, 68, 36,,, +4, 5, 3, 2, 1, 1, 1, 
1, 128, 196, 196, 132, 68,,, +4, 5, 3, 2, 1, 1, 1, 1, 256, 196, 196, 132, 68,,, +4, 5, 3, 2, 1, 1, 1, 1, 384, 196, 196, 132, 68,,, +4, 5, 3, 2, 2, 2, 1, 1, 128, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 1, 1, 256, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 1, 1, 384, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 1, 2, 128, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 1, 2, 256, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 1, 2, 384, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 2, 1, 128, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 2, 1, 256, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 2, 1, 384, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 2, 2, 128, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 2, 2, 256, 196, 194, 132, 68,,, +4, 5, 3, 2, 2, 2, 2, 2, 384, 196, 194, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 1, 128, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 1, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 1, 384, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 2, 128, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 2, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 2, 384, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 3, 128, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 3, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 1, 3, 384, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 1, 128, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 1, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 1, 384, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 2, 128, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 2, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 2, 384, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 3, 128, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 3, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 2, 3, 384, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 1, 128, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 1, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 1, 384, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 2, 128, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 2, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 2, 384, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 3, 128, 
196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 3, 256, 196, 184, 132, 68,,, +4, 5, 3, 2, 3, 3, 3, 3, 384, 196, 184, 132, 68,,, +4, 5, 3, 3, 1, 1, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 1, 1, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 1, 1, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 1, 2, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 1, 2, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 1, 2, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 2, 1, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 2, 1, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 2, 1, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 2, 2, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 2, 2, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 2, 2, 2, 2, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 2, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 2, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 2, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 3, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 3, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 1, 3, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 1, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 1, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 1, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 2, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 2, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 2, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 3, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 3, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 2, 3, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 3, 1, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 3, 1, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 3, 1, 384, 240, 240, 196, 100,,, +4, 
5, 3, 3, 3, 3, 3, 2, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 3, 2, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 3, 2, 384, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 3, 3, 128, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 3, 3, 256, 240, 240, 196, 100,,, +4, 5, 3, 3, 3, 3, 3, 3, 384, 240, 240, 196, 100,,, +4, 5, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 1, 2, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 1, 2, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 1, 2, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 2, 1, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 2, 1, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 2, 1, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 2, 2, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 2, 2, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 2, 2, 2, 2, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 2, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 2, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 2, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 3, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 3, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 1, 3, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 1, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 1, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 1, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 2, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 2, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 2, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 3, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 3, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 2, 3, 
384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 1, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 1, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 1, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 2, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 2, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 2, 384, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 3, 128, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 3, 256, 240, 240, 240, 132,,, +4, 5, 3, 4, 3, 3, 3, 3, 384, 240, 240, 240, 132,,, +4, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 1, 2, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 1, 2, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 1, 2, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 2, 1, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 2, 1, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 2, 1, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 2, 2, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 2, 2, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 2, 2, 2, 2, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 2, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 2, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 2, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 3, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 3, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 1, 3, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 2, 1, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 2, 1, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 2, 1, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 2, 2, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 2, 2, 256, 240, 240, 240, 
164,,, +4, 5, 3, 5, 3, 3, 2, 2, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 2, 3, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 2, 3, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 2, 3, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 1, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 1, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 1, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 2, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 2, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 2, 384, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 3, 128, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 3, 256, 240, 240, 240, 164,,, +4, 5, 3, 5, 3, 3, 3, 3, 384, 240, 240, 240, 164,,, +4, 5, 4, 1, 1, 1, 1, 1, 128, 100, 100, 68, 36,,, +4, 5, 4, 1, 1, 1, 1, 1, 256, 100, 100, 68, 36,,, +4, 5, 4, 1, 1, 1, 1, 1, 384, 100, 100, 68, 36,,, +4, 5, 4, 1, 2, 2, 1, 1, 128, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 1, 1, 256, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 1, 1, 384, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 1, 2, 128, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 1, 2, 256, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 1, 2, 384, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 2, 1, 128, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 2, 1, 256, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 2, 1, 384, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 2, 2, 128, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 2, 2, 256, 100, 99, 68, 36,,, +4, 5, 4, 1, 2, 2, 2, 2, 384, 100, 99, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 1, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 1, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 1, 384, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 2, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 2, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 2, 384, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 3, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 3, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 1, 3, 384, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 2, 1, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 2, 1, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 2, 1, 384, 100, 94, 
68, 36,,, +4, 5, 4, 1, 3, 3, 2, 2, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 2, 2, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 2, 2, 384, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 2, 3, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 2, 3, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 2, 3, 384, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 1, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 1, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 1, 384, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 2, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 2, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 2, 384, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 3, 128, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 3, 256, 100, 94, 68, 36,,, +4, 5, 4, 1, 3, 3, 3, 3, 384, 100, 94, 68, 36,,, +4, 5, 4, 2, 1, 1, 1, 1, 128, 196, 196, 132, 68,,, +4, 5, 4, 2, 1, 1, 1, 1, 256, 196, 196, 132, 68,,, +4, 5, 4, 2, 1, 1, 1, 1, 384, 196, 196, 132, 68,,, +4, 5, 4, 2, 2, 2, 1, 1, 128, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 1, 1, 256, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 1, 1, 384, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 1, 2, 128, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 1, 2, 256, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 1, 2, 384, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 2, 1, 128, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 2, 1, 256, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 2, 1, 384, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 2, 2, 128, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 2, 2, 256, 196, 194, 132, 68,,, +4, 5, 4, 2, 2, 2, 2, 2, 384, 196, 194, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 1, 128, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 1, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 1, 384, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 2, 128, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 2, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 2, 384, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 3, 128, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 3, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 1, 3, 384, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 1, 128, 
196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 1, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 1, 384, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 2, 128, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 2, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 2, 384, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 3, 128, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 3, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 2, 3, 384, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 1, 128, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 1, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 1, 384, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 2, 128, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 2, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 2, 384, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 3, 128, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 3, 256, 196, 184, 132, 68,,, +4, 5, 4, 2, 3, 3, 3, 3, 384, 196, 184, 132, 68,,, +4, 5, 4, 3, 1, 1, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 1, 1, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 1, 1, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 1, 2, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 1, 2, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 1, 2, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 2, 1, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 2, 1, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 2, 1, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 2, 2, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 2, 2, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 2, 2, 2, 2, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 1, 1, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 1, 1, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 1, 1, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 1, 2, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 1, 2, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 1, 2, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 
1, 3, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 1, 3, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 1, 3, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 1, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 1, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 1, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 2, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 2, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 2, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 3, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 3, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 2, 3, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 1, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 1, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 1, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 2, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 2, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 2, 384, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 3, 128, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 3, 256, 240, 240, 196, 100,,, +4, 5, 4, 3, 3, 3, 3, 3, 384, 240, 240, 196, 100,,, +4, 5, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 1, 1, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 1, 2, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 1, 2, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 1, 2, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 2, 1, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 2, 1, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 2, 1, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 2, 2, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 2, 2, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 2, 2, 2, 2, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 1, 1, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 1, 1, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 1, 1, 384, 240, 240, 
240, 132,,, +4, 5, 4, 4, 3, 3, 1, 2, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 1, 2, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 1, 2, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 1, 3, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 1, 3, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 1, 3, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 1, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 1, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 1, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 2, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 2, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 2, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 3, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 3, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 2, 3, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 1, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 1, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 1, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 2, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 2, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 2, 384, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 3, 128, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 3, 256, 240, 240, 240, 132,,, +4, 5, 4, 4, 3, 3, 3, 3, 384, 240, 240, 240, 132,,, +4, 5, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 1, 2, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 1, 2, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 1, 2, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 2, 1, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 2, 1, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 2, 1, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 2, 2, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 2, 2, 2, 2, 256, 240, 240, 240, 164,,, +4, 5, 4, 
5, 2, 2, 2, 2, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 1, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 1, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 1, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 2, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 2, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 2, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 3, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 3, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 1, 3, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 1, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 1, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 1, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 2, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 2, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 2, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 3, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 3, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 2, 3, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 1, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 1, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 1, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 2, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 2, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 2, 384, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 3, 128, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 3, 256, 240, 240, 240, 164,,, +4, 5, 4, 5, 3, 3, 3, 3, 384, 240, 240, 240, 164,,, +4, 6, 1, 1, 1, 1, 1, 1, 128, 101, 101, 69,,,, +4, 6, 1, 1, 1, 1, 1, 1, 256, 101, 101, 69,,,, +4, 6, 1, 1, 1, 1, 1, 1, 384, 101, 101, 69,,,, +4, 6, 1, 1, 2, 2, 1, 1, 128, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 1, 1, 256, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 1, 1, 384, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 1, 2, 128, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 1, 2, 256, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 1, 2, 384, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 2, 1, 128, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 2, 1, 256, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 2, 1, 384, 101, 97, 
69,,,, +4, 6, 1, 1, 2, 2, 2, 2, 128, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 2, 2, 256, 101, 97, 69,,,, +4, 6, 1, 1, 2, 2, 2, 2, 384, 101, 97, 69,,,, +4, 6, 1, 1, 3, 3, 1, 1, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 1, 1, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 1, 1, 384, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 1, 2, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 1, 2, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 1, 2, 384, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 1, 3, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 1, 3, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 1, 3, 384, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 1, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 1, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 1, 384, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 2, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 2, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 2, 384, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 3, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 3, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 2, 3, 384, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 1, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 1, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 1, 384, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 2, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 2, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 2, 384, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 3, 128, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 3, 256, 101, 92, 69,,,, +4, 6, 1, 1, 3, 3, 3, 3, 384, 101, 92, 69,,,, +4, 6, 1, 2, 1, 1, 1, 1, 128, 197, 197, 133,,,, +4, 6, 1, 2, 1, 1, 1, 1, 256, 197, 197, 133,,,, +4, 6, 1, 2, 1, 1, 1, 1, 384, 197, 197, 133,,,, +4, 6, 1, 2, 2, 2, 1, 1, 128, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 1, 1, 256, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 1, 1, 384, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 1, 2, 128, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 1, 2, 256, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 1, 2, 384, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 2, 1, 128, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 2, 1, 256, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 2, 1, 384, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 2, 2, 128, 197, 
189, 133,,,, +4, 6, 1, 2, 2, 2, 2, 2, 256, 197, 189, 133,,,, +4, 6, 1, 2, 2, 2, 2, 2, 384, 197, 189, 133,,,, +4, 6, 1, 2, 3, 3, 1, 1, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 1, 1, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 1, 1, 384, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 1, 2, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 1, 2, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 1, 2, 384, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 1, 3, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 1, 3, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 1, 3, 384, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 1, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 1, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 1, 384, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 2, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 2, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 2, 384, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 3, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 3, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 2, 3, 384, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 1, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 1, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 1, 384, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 2, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 2, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 2, 384, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 3, 128, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 3, 256, 197, 179, 133,,,, +4, 6, 1, 2, 3, 3, 3, 3, 384, 197, 179, 133,,,, +4, 6, 1, 3, 1, 1, 1, 1, 128, 240, 240, 197,,,, +4, 6, 1, 3, 1, 1, 1, 1, 256, 240, 240, 197,,,, +4, 6, 1, 3, 1, 1, 1, 1, 384, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 1, 1, 128, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 1, 1, 256, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 1, 1, 384, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 1, 2, 128, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 1, 2, 256, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 1, 2, 384, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 2, 1, 128, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 2, 1, 256, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 2, 1, 384, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 
2, 2, 128, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 2, 2, 256, 240, 240, 197,,,, +4, 6, 1, 3, 2, 2, 2, 2, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 1, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 1, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 1, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 2, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 2, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 2, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 3, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 3, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 1, 3, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 1, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 1, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 1, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 2, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 2, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 2, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 3, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 3, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 2, 3, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 1, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 1, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 1, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 2, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 2, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 2, 384, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 3, 128, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 3, 256, 240, 240, 197,,,, +4, 6, 1, 3, 3, 3, 3, 3, 384, 240, 240, 197,,,, +4, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, 
+4, 6, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 1, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 2, 1, 384, 
240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 1, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 6, 1, 6, 
2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 1, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 1, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 2, 1, 1, 1, 1, 1, 128, 101, 101, 69,,,, +4, 6, 2, 1, 1, 1, 1, 1, 256, 101, 101, 69,,,, +4, 6, 2, 1, 1, 1, 1, 1, 384, 101, 101, 69,,,, +4, 6, 2, 1, 2, 2, 1, 1, 128, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 1, 1, 256, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 1, 1, 384, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 1, 2, 128, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 1, 2, 256, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 1, 2, 384, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 2, 1, 128, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 2, 1, 256, 101, 97, 69,,,, +4, 6, 2, 1, 2, 
2, 2, 1, 384, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 2, 2, 128, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 2, 2, 256, 101, 97, 69,,,, +4, 6, 2, 1, 2, 2, 2, 2, 384, 101, 97, 69,,,, +4, 6, 2, 1, 3, 3, 1, 1, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 1, 1, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 1, 1, 384, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 1, 2, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 1, 2, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 1, 2, 384, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 1, 3, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 1, 3, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 1, 3, 384, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 1, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 1, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 1, 384, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 2, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 2, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 2, 384, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 3, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 3, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 2, 3, 384, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 1, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 1, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 1, 384, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 2, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 2, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 2, 384, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 3, 128, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 3, 256, 101, 92, 69,,,, +4, 6, 2, 1, 3, 3, 3, 3, 384, 101, 92, 69,,,, +4, 6, 2, 2, 1, 1, 1, 1, 128, 197, 197, 133,,,, +4, 6, 2, 2, 1, 1, 1, 1, 256, 197, 197, 133,,,, +4, 6, 2, 2, 1, 1, 1, 1, 384, 197, 197, 133,,,, +4, 6, 2, 2, 2, 2, 1, 1, 128, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 1, 1, 256, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 1, 1, 384, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 1, 2, 128, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 1, 2, 256, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 1, 2, 384, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 2, 1, 128, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 2, 1, 256, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 2, 1, 384, 197, 189, 133,,,, +4, 6, 2, 2, 
2, 2, 2, 2, 128, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 2, 2, 256, 197, 189, 133,,,, +4, 6, 2, 2, 2, 2, 2, 2, 384, 197, 189, 133,,,, +4, 6, 2, 2, 3, 3, 1, 1, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 1, 1, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 1, 1, 384, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 1, 2, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 1, 2, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 1, 2, 384, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 1, 3, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 1, 3, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 1, 3, 384, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 1, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 1, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 1, 384, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 2, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 2, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 2, 384, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 3, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 3, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 2, 3, 384, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 1, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 1, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 1, 384, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 2, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 2, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 2, 384, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 3, 128, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 3, 256, 197, 179, 133,,,, +4, 6, 2, 2, 3, 3, 3, 3, 384, 197, 179, 133,,,, +4, 6, 2, 3, 1, 1, 1, 1, 128, 240, 240, 197,,,, +4, 6, 2, 3, 1, 1, 1, 1, 256, 240, 240, 197,,,, +4, 6, 2, 3, 1, 1, 1, 1, 384, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 1, 1, 128, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 1, 1, 256, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 1, 1, 384, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 1, 2, 128, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 1, 2, 256, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 1, 2, 384, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 2, 1, 128, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 2, 1, 256, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 2, 1, 384, 240, 240, 
197,,,, +4, 6, 2, 3, 2, 2, 2, 2, 128, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 2, 2, 256, 240, 240, 197,,,, +4, 6, 2, 3, 2, 2, 2, 2, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 1, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 1, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 1, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 2, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 2, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 2, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 3, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 3, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 1, 3, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 1, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 1, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 1, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 2, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 2, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 2, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 3, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 3, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 2, 3, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 1, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 1, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 1, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 2, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 2, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 2, 384, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 3, 128, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 3, 256, 240, 240, 197,,,, +4, 6, 2, 3, 3, 3, 3, 3, 384, 240, 240, 197,,,, +4, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 2, 
1, 384, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 2, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 
6, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 2, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 2, 1, 256, 240, 
240, 240,,,, +4, 6, 2, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 2, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 2, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 3, 1, 1, 1, 1, 1, 128, 101, 101, 69,,,, +4, 6, 3, 1, 1, 1, 1, 1, 256, 101, 101, 69,,,, +4, 6, 3, 1, 1, 1, 1, 1, 384, 101, 101, 69,,,, +4, 6, 3, 1, 2, 2, 1, 1, 128, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 1, 1, 256, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 1, 1, 384, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 1, 2, 128, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 1, 2, 256, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 1, 2, 384, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 2, 1, 128, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 2, 1, 256, 101, 
97, 69,,,, +4, 6, 3, 1, 2, 2, 2, 1, 384, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 2, 2, 128, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 2, 2, 256, 101, 97, 69,,,, +4, 6, 3, 1, 2, 2, 2, 2, 384, 101, 97, 69,,,, +4, 6, 3, 1, 3, 3, 1, 1, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 1, 1, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 1, 1, 384, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 1, 2, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 1, 2, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 1, 2, 384, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 1, 3, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 1, 3, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 1, 3, 384, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 1, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 1, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 1, 384, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 2, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 2, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 2, 384, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 3, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 3, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 2, 3, 384, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 1, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 1, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 1, 384, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 2, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 2, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 2, 384, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 3, 128, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 3, 256, 101, 92, 69,,,, +4, 6, 3, 1, 3, 3, 3, 3, 384, 101, 92, 69,,,, +4, 6, 3, 2, 1, 1, 1, 1, 128, 197, 197, 133,,,, +4, 6, 3, 2, 1, 1, 1, 1, 256, 197, 197, 133,,,, +4, 6, 3, 2, 1, 1, 1, 1, 384, 197, 197, 133,,,, +4, 6, 3, 2, 2, 2, 1, 1, 128, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 1, 1, 256, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 1, 1, 384, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 1, 2, 128, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 1, 2, 256, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 1, 2, 384, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 2, 1, 128, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 2, 1, 256, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 2, 1, 384, 197, 
189, 133,,,, +4, 6, 3, 2, 2, 2, 2, 2, 128, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 2, 2, 256, 197, 189, 133,,,, +4, 6, 3, 2, 2, 2, 2, 2, 384, 197, 189, 133,,,, +4, 6, 3, 2, 3, 3, 1, 1, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 1, 1, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 1, 1, 384, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 1, 2, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 1, 2, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 1, 2, 384, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 1, 3, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 1, 3, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 1, 3, 384, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 1, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 1, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 1, 384, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 2, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 2, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 2, 384, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 3, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 3, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 2, 3, 384, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 1, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 1, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 1, 384, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 2, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 2, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 2, 384, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 3, 128, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 3, 256, 197, 179, 133,,,, +4, 6, 3, 2, 3, 3, 3, 3, 384, 197, 179, 133,,,, +4, 6, 3, 3, 1, 1, 1, 1, 128, 240, 240, 197,,,, +4, 6, 3, 3, 1, 1, 1, 1, 256, 240, 240, 197,,,, +4, 6, 3, 3, 1, 1, 1, 1, 384, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 1, 1, 128, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 1, 1, 256, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 1, 1, 384, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 1, 2, 128, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 1, 2, 256, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 1, 2, 384, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 2, 1, 128, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 2, 1, 256, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 
2, 1, 384, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 2, 2, 128, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 2, 2, 256, 240, 240, 197,,,, +4, 6, 3, 3, 2, 2, 2, 2, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 1, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 1, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 1, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 2, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 2, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 2, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 3, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 3, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 1, 3, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 1, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 1, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 1, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 2, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 2, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 2, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 3, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 3, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 2, 3, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 1, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 1, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 1, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 2, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 2, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 2, 384, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 3, 128, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 3, 256, 240, 240, 197,,,, +4, 6, 3, 3, 3, 3, 3, 3, 384, 240, 240, 197,,,, +4, 6, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, 
+4, 6, 3, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 3, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 3, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 2, 1, 256, 
240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 3, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 3, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 3, 6, 
2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 3, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 3, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 4, 1, 1, 1, 1, 1, 128, 101, 101, 69,,,, +4, 6, 4, 1, 1, 1, 1, 1, 256, 101, 101, 69,,,, +4, 6, 4, 1, 1, 1, 1, 1, 384, 101, 101, 69,,,, +4, 6, 4, 1, 2, 2, 1, 1, 128, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 1, 1, 256, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 1, 1, 384, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 1, 2, 128, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 1, 2, 256, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 1, 2, 384, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 2, 1, 128, 101, 97, 69,,,, +4, 6, 4, 1, 
2, 2, 2, 1, 256, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 2, 1, 384, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 2, 2, 128, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 2, 2, 256, 101, 97, 69,,,, +4, 6, 4, 1, 2, 2, 2, 2, 384, 101, 97, 69,,,, +4, 6, 4, 1, 3, 3, 1, 1, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 1, 1, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 1, 1, 384, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 1, 2, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 1, 2, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 1, 2, 384, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 1, 3, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 1, 3, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 1, 3, 384, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 1, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 1, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 1, 384, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 2, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 2, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 2, 384, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 3, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 3, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 2, 3, 384, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 1, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 1, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 1, 384, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 2, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 2, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 2, 384, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 3, 128, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 3, 256, 101, 92, 69,,,, +4, 6, 4, 1, 3, 3, 3, 3, 384, 101, 92, 69,,,, +4, 6, 4, 2, 1, 1, 1, 1, 128, 197, 197, 133,,,, +4, 6, 4, 2, 1, 1, 1, 1, 256, 197, 197, 133,,,, +4, 6, 4, 2, 1, 1, 1, 1, 384, 197, 197, 133,,,, +4, 6, 4, 2, 2, 2, 1, 1, 128, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 1, 1, 256, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 1, 1, 384, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 1, 2, 128, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 1, 2, 256, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 1, 2, 384, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 2, 1, 128, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 2, 1, 256, 197, 189, 133,,,, +4, 6, 4, 2, 
2, 2, 2, 1, 384, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 2, 2, 128, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 2, 2, 256, 197, 189, 133,,,, +4, 6, 4, 2, 2, 2, 2, 2, 384, 197, 189, 133,,,, +4, 6, 4, 2, 3, 3, 1, 1, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 1, 1, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 1, 1, 384, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 1, 2, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 1, 2, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 1, 2, 384, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 1, 3, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 1, 3, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 1, 3, 384, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 1, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 1, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 1, 384, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 2, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 2, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 2, 384, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 3, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 3, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 2, 3, 384, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 1, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 1, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 1, 384, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 2, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 2, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 2, 384, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 3, 128, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 3, 256, 197, 179, 133,,,, +4, 6, 4, 2, 3, 3, 3, 3, 384, 197, 179, 133,,,, +4, 6, 4, 3, 1, 1, 1, 1, 128, 240, 240, 197,,,, +4, 6, 4, 3, 1, 1, 1, 1, 256, 240, 240, 197,,,, +4, 6, 4, 3, 1, 1, 1, 1, 384, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 1, 1, 128, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 1, 1, 256, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 1, 1, 384, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 1, 2, 128, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 1, 2, 256, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 1, 2, 384, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 2, 1, 128, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 2, 1, 256, 240, 240, 
197,,,, +4, 6, 4, 3, 2, 2, 2, 1, 384, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 2, 2, 128, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 2, 2, 256, 240, 240, 197,,,, +4, 6, 4, 3, 2, 2, 2, 2, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 1, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 1, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 1, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 2, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 2, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 2, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 3, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 3, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 1, 3, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 1, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 1, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 1, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 2, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 2, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 2, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 3, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 3, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 2, 3, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 1, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 1, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 1, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 2, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 2, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 2, 384, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 3, 128, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 3, 256, 240, 240, 197,,,, +4, 6, 4, 3, 3, 3, 3, 3, 384, 240, 240, 197,,,, +4, 6, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 2, 
1, 256, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 4, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 4, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 
6, 4, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 4, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 4, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 6, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 2, 1, 128, 240, 
240, 240,,,, +4, 6, 4, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 6, 4, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 6, 4, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 1, 1, 1, 1, 1, 1, 128, 102, 102, 70,,,, +4, 7, 1, 1, 1, 1, 1, 1, 256, 102, 102, 70,,,, +4, 7, 1, 1, 1, 1, 1, 1, 384, 102, 102, 70,,,, +4, 7, 1, 1, 2, 2, 1, 1, 128, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 1, 1, 256, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 1, 1, 384, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 1, 2, 128, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 1, 2, 256, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 1, 2, 384, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 2, 1, 128, 
102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 2, 1, 256, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 2, 1, 384, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 2, 2, 128, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 2, 2, 256, 102, 94, 69,,,, +4, 7, 1, 1, 2, 2, 2, 2, 384, 102, 94, 69,,,, +4, 7, 1, 1, 3, 3, 1, 1, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 1, 1, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 1, 1, 384, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 1, 2, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 1, 2, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 1, 2, 384, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 1, 3, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 1, 3, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 1, 3, 384, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 1, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 1, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 1, 384, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 2, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 2, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 2, 384, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 3, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 3, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 2, 3, 384, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 1, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 1, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 1, 384, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 2, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 2, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 2, 384, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 3, 128, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 3, 256, 101, 89, 66,,,, +4, 7, 1, 1, 3, 3, 3, 3, 384, 101, 89, 66,,,, +4, 7, 1, 2, 1, 1, 1, 1, 128, 198, 198, 134,,,, +4, 7, 1, 2, 1, 1, 1, 1, 256, 198, 198, 134,,,, +4, 7, 1, 2, 1, 1, 1, 1, 384, 198, 198, 134,,,, +4, 7, 1, 2, 2, 2, 1, 1, 128, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 1, 1, 256, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 1, 1, 384, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 1, 2, 128, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 1, 2, 256, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 1, 2, 384, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 2, 1, 128, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 2, 1, 256, 
198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 2, 1, 384, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 2, 2, 128, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 2, 2, 256, 198, 182, 132,,,, +4, 7, 1, 2, 2, 2, 2, 2, 384, 198, 182, 132,,,, +4, 7, 1, 2, 3, 3, 1, 1, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 1, 1, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 1, 1, 384, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 1, 2, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 1, 2, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 1, 2, 384, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 1, 3, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 1, 3, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 1, 3, 384, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 1, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 1, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 1, 384, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 2, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 2, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 2, 384, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 3, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 3, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 2, 3, 384, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 1, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 1, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 1, 384, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 2, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 2, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 2, 384, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 3, 128, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 3, 256, 196, 172, 126,,,, +4, 7, 1, 2, 3, 3, 3, 3, 384, 196, 172, 126,,,, +4, 7, 1, 3, 1, 1, 1, 1, 128, 240, 240, 198,,,, +4, 7, 1, 3, 1, 1, 1, 1, 256, 240, 240, 198,,,, +4, 7, 1, 3, 1, 1, 1, 1, 384, 240, 240, 198,,,, +4, 7, 1, 3, 2, 2, 1, 1, 128, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 1, 1, 256, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 1, 1, 384, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 1, 2, 128, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 1, 2, 256, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 1, 2, 384, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 2, 1, 128, 240, 240, 195,,,, +4, 7, 1, 3, 
2, 2, 2, 1, 256, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 2, 1, 384, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 2, 2, 128, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 2, 2, 256, 240, 240, 195,,,, +4, 7, 1, 3, 2, 2, 2, 2, 384, 240, 240, 195,,,, +4, 7, 1, 3, 3, 3, 1, 1, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 1, 1, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 1, 1, 384, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 1, 2, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 1, 2, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 1, 2, 384, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 1, 3, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 1, 3, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 1, 3, 384, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 1, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 1, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 1, 384, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 2, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 2, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 2, 384, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 3, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 3, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 2, 3, 384, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 1, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 1, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 1, 384, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 2, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 2, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 2, 384, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 3, 128, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 3, 256, 240, 240, 186,,,, +4, 7, 1, 3, 3, 3, 3, 3, 384, 240, 240, 186,,,, +4, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 2, 1, 128, 240, 240, 
240,,,, +4, 7, 1, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 1, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 1, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 2, 
1, 128, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 1, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 
7, 1, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 1, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 1, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 1, 2, 384, 240, 
240, 240,,,, +4, 7, 1, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 1, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 1, 7, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 2, 1, 1, 1, 1, 1, 128, 102, 102, 70,,,, +4, 7, 2, 1, 1, 1, 1, 1, 256, 102, 102, 70,,,, +4, 7, 2, 1, 1, 1, 1, 1, 384, 102, 102, 70,,,, +4, 7, 2, 1, 2, 2, 1, 1, 128, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 1, 1, 256, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 1, 1, 384, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 1, 2, 128, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 1, 2, 256, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 1, 2, 384, 
102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 2, 1, 128, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 2, 1, 256, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 2, 1, 384, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 2, 2, 128, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 2, 2, 256, 102, 94, 69,,,, +4, 7, 2, 1, 2, 2, 2, 2, 384, 102, 94, 69,,,, +4, 7, 2, 1, 3, 3, 1, 1, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 1, 1, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 1, 1, 384, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 1, 2, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 1, 2, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 1, 2, 384, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 1, 3, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 1, 3, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 1, 3, 384, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 1, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 1, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 1, 384, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 2, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 2, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 2, 384, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 3, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 3, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 2, 3, 384, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 1, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 1, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 1, 384, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 2, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 2, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 2, 384, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 3, 128, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 3, 256, 101, 89, 66,,,, +4, 7, 2, 1, 3, 3, 3, 3, 384, 101, 89, 66,,,, +4, 7, 2, 2, 1, 1, 1, 1, 128, 198, 198, 134,,,, +4, 7, 2, 2, 1, 1, 1, 1, 256, 198, 198, 134,,,, +4, 7, 2, 2, 1, 1, 1, 1, 384, 198, 198, 134,,,, +4, 7, 2, 2, 2, 2, 1, 1, 128, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 1, 1, 256, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 1, 1, 384, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 1, 2, 128, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 1, 2, 256, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 1, 2, 384, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 2, 1, 128, 
198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 2, 1, 256, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 2, 1, 384, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 2, 2, 128, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 2, 2, 256, 198, 182, 132,,,, +4, 7, 2, 2, 2, 2, 2, 2, 384, 198, 182, 132,,,, +4, 7, 2, 2, 3, 3, 1, 1, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 1, 1, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 1, 1, 384, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 1, 2, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 1, 2, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 1, 2, 384, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 1, 3, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 1, 3, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 1, 3, 384, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 1, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 1, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 1, 384, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 2, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 2, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 2, 384, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 3, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 3, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 2, 3, 384, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 1, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 1, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 1, 384, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 2, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 2, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 2, 384, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 3, 128, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 3, 256, 196, 172, 126,,,, +4, 7, 2, 2, 3, 3, 3, 3, 384, 196, 172, 126,,,, +4, 7, 2, 3, 1, 1, 1, 1, 128, 240, 240, 198,,,, +4, 7, 2, 3, 1, 1, 1, 1, 256, 240, 240, 198,,,, +4, 7, 2, 3, 1, 1, 1, 1, 384, 240, 240, 198,,,, +4, 7, 2, 3, 2, 2, 1, 1, 128, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 1, 1, 256, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 1, 1, 384, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 1, 2, 128, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 1, 2, 256, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 1, 2, 384, 240, 240, 195,,,, +4, 7, 2, 3, 
2, 2, 2, 1, 128, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 2, 1, 256, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 2, 1, 384, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 2, 2, 128, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 2, 2, 256, 240, 240, 195,,,, +4, 7, 2, 3, 2, 2, 2, 2, 384, 240, 240, 195,,,, +4, 7, 2, 3, 3, 3, 1, 1, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 1, 1, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 1, 1, 384, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 1, 2, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 1, 2, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 1, 2, 384, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 1, 3, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 1, 3, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 1, 3, 384, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 1, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 1, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 1, 384, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 2, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 2, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 2, 384, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 3, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 3, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 2, 3, 384, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 1, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 1, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 1, 384, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 2, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 2, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 2, 384, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 3, 128, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 3, 256, 240, 240, 186,,,, +4, 7, 2, 3, 3, 3, 3, 3, 384, 240, 240, 186,,,, +4, 7, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 1, 2, 384, 240, 240, 
240,,,, +4, 7, 2, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 2, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 2, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 1, 
2, 384, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 2, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 
7, 2, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 2, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 2, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 1, 2, 256, 240, 
240, 240,,,, +4, 7, 2, 7, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 2, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 2, 7, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 3, 1, 1, 1, 1, 1, 128, 102, 102, 70,,,, +4, 7, 3, 1, 1, 1, 1, 1, 256, 102, 102, 70,,,, +4, 7, 3, 1, 1, 1, 1, 1, 384, 102, 102, 70,,,, +4, 7, 3, 1, 2, 2, 1, 1, 128, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 1, 1, 256, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 1, 1, 384, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 1, 2, 128, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 1, 2, 256, 
102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 1, 2, 384, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 2, 1, 128, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 2, 1, 256, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 2, 1, 384, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 2, 2, 128, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 2, 2, 256, 102, 94, 69,,,, +4, 7, 3, 1, 2, 2, 2, 2, 384, 102, 94, 69,,,, +4, 7, 3, 1, 3, 3, 1, 1, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 1, 1, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 1, 1, 384, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 1, 2, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 1, 2, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 1, 2, 384, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 1, 3, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 1, 3, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 1, 3, 384, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 1, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 1, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 1, 384, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 2, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 2, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 2, 384, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 3, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 3, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 2, 3, 384, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 1, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 1, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 1, 384, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 2, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 2, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 2, 384, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 3, 128, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 3, 256, 101, 89, 66,,,, +4, 7, 3, 1, 3, 3, 3, 3, 384, 101, 89, 66,,,, +4, 7, 3, 2, 1, 1, 1, 1, 128, 198, 198, 134,,,, +4, 7, 3, 2, 1, 1, 1, 1, 256, 198, 198, 134,,,, +4, 7, 3, 2, 1, 1, 1, 1, 384, 198, 198, 134,,,, +4, 7, 3, 2, 2, 2, 1, 1, 128, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 1, 1, 256, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 1, 1, 384, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 1, 2, 128, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 1, 2, 256, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 1, 2, 384, 198, 
182, 132,,,, +4, 7, 3, 2, 2, 2, 2, 1, 128, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 2, 1, 256, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 2, 1, 384, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 2, 2, 128, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 2, 2, 256, 198, 182, 132,,,, +4, 7, 3, 2, 2, 2, 2, 2, 384, 198, 182, 132,,,, +4, 7, 3, 2, 3, 3, 1, 1, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 1, 1, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 1, 1, 384, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 1, 2, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 1, 2, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 1, 2, 384, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 1, 3, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 1, 3, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 1, 3, 384, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 1, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 1, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 1, 384, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 2, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 2, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 2, 384, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 3, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 3, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 2, 3, 384, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 1, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 1, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 1, 384, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 2, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 2, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 2, 384, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 3, 128, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 3, 256, 196, 172, 126,,,, +4, 7, 3, 2, 3, 3, 3, 3, 384, 196, 172, 126,,,, +4, 7, 3, 3, 1, 1, 1, 1, 128, 240, 240, 198,,,, +4, 7, 3, 3, 1, 1, 1, 1, 256, 240, 240, 198,,,, +4, 7, 3, 3, 1, 1, 1, 1, 384, 240, 240, 198,,,, +4, 7, 3, 3, 2, 2, 1, 1, 128, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 1, 1, 256, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 1, 1, 384, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 1, 2, 128, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 1, 2, 256, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 
1, 2, 384, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 2, 1, 128, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 2, 1, 256, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 2, 1, 384, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 2, 2, 128, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 2, 2, 256, 240, 240, 195,,,, +4, 7, 3, 3, 2, 2, 2, 2, 384, 240, 240, 195,,,, +4, 7, 3, 3, 3, 3, 1, 1, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 1, 1, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 1, 1, 384, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 1, 2, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 1, 2, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 1, 2, 384, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 1, 3, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 1, 3, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 1, 3, 384, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 1, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 1, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 1, 384, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 2, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 2, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 2, 384, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 3, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 3, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 2, 3, 384, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 1, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 1, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 1, 384, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 2, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 2, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 2, 384, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 3, 128, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 3, 256, 240, 240, 186,,,, +4, 7, 3, 3, 3, 3, 3, 3, 384, 240, 240, 186,,,, +4, 7, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, 
+4, 7, 3, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 3, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 3, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 1, 2, 256, 
240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 3, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 3, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 3, 6, 
2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 3, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 3, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 1, 2, 128, 240, 240, 
240,,,, +4, 7, 3, 7, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 3, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 3, 7, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 4, 1, 1, 1, 1, 1, 128, 102, 102, 70,,,, +4, 7, 4, 1, 1, 1, 1, 1, 256, 102, 102, 70,,,, +4, 7, 4, 1, 1, 1, 1, 1, 384, 102, 102, 70,,,, +4, 7, 4, 1, 2, 2, 1, 1, 128, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 1, 1, 256, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 1, 1, 384, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 1, 2, 128, 
102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 1, 2, 256, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 1, 2, 384, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 2, 1, 128, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 2, 1, 256, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 2, 1, 384, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 2, 2, 128, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 2, 2, 256, 102, 94, 69,,,, +4, 7, 4, 1, 2, 2, 2, 2, 384, 102, 94, 69,,,, +4, 7, 4, 1, 3, 3, 1, 1, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 1, 1, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 1, 1, 384, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 1, 2, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 1, 2, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 1, 2, 384, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 1, 3, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 1, 3, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 1, 3, 384, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 1, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 1, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 1, 384, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 2, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 2, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 2, 384, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 3, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 3, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 2, 3, 384, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 1, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 1, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 1, 384, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 2, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 2, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 2, 384, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 3, 128, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 3, 256, 101, 89, 66,,,, +4, 7, 4, 1, 3, 3, 3, 3, 384, 101, 89, 66,,,, +4, 7, 4, 2, 1, 1, 1, 1, 128, 198, 198, 134,,,, +4, 7, 4, 2, 1, 1, 1, 1, 256, 198, 198, 134,,,, +4, 7, 4, 2, 1, 1, 1, 1, 384, 198, 198, 134,,,, +4, 7, 4, 2, 2, 2, 1, 1, 128, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 1, 1, 256, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 1, 1, 384, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 1, 2, 128, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 1, 2, 256, 198, 
182, 132,,,, +4, 7, 4, 2, 2, 2, 1, 2, 384, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 2, 1, 128, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 2, 1, 256, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 2, 1, 384, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 2, 2, 128, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 2, 2, 256, 198, 182, 132,,,, +4, 7, 4, 2, 2, 2, 2, 2, 384, 198, 182, 132,,,, +4, 7, 4, 2, 3, 3, 1, 1, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 1, 1, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 1, 1, 384, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 1, 2, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 1, 2, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 1, 2, 384, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 1, 3, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 1, 3, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 1, 3, 384, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 1, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 1, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 1, 384, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 2, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 2, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 2, 384, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 3, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 3, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 2, 3, 384, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 1, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 1, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 1, 384, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 2, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 2, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 2, 384, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 3, 128, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 3, 256, 196, 172, 126,,,, +4, 7, 4, 2, 3, 3, 3, 3, 384, 196, 172, 126,,,, +4, 7, 4, 3, 1, 1, 1, 1, 128, 240, 240, 198,,,, +4, 7, 4, 3, 1, 1, 1, 1, 256, 240, 240, 198,,,, +4, 7, 4, 3, 1, 1, 1, 1, 384, 240, 240, 198,,,, +4, 7, 4, 3, 2, 2, 1, 1, 128, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 1, 1, 256, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 1, 1, 384, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 1, 2, 128, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 
1, 2, 256, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 1, 2, 384, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 2, 1, 128, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 2, 1, 256, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 2, 1, 384, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 2, 2, 128, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 2, 2, 256, 240, 240, 195,,,, +4, 7, 4, 3, 2, 2, 2, 2, 384, 240, 240, 195,,,, +4, 7, 4, 3, 3, 3, 1, 1, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 1, 1, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 1, 1, 384, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 1, 2, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 1, 2, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 1, 2, 384, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 1, 3, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 1, 3, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 1, 3, 384, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 1, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 1, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 1, 384, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 2, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 2, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 2, 384, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 3, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 3, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 2, 3, 384, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 1, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 1, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 1, 384, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 2, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 2, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 2, 384, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 3, 128, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 3, 256, 240, 240, 186,,,, +4, 7, 4, 3, 3, 3, 3, 3, 384, 240, 240, 186,,,, +4, 7, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 1, 2, 128, 240, 240, 240,,,, 
+4, 7, 4, 4, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 4, 4, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 4, 4, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 1, 2, 128, 
240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 4, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 4, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 6, 
2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 4, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 4, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +4, 7, 4, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 1, 1, 384, 240, 240, 
240,,,, +4, 7, 4, 7, 2, 2, 1, 2, 128, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 1, 2, 256, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 1, 2, 384, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 2, 1, 128, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 2, 1, 256, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 2, 1, 384, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 2, 2, 128, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 2, 2, 256, 240, 240, 240,,,, +4, 7, 4, 7, 2, 2, 2, 2, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 1, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 1, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 1, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 2, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 2, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 2, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 3, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 3, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 1, 3, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 1, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 1, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 1, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 2, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 2, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 2, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 3, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 3, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 2, 3, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 1, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 1, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 1, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 2, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 2, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 2, 384, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 3, 128, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 3, 256, 240, 240, 240,,,, +4, 7, 4, 7, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 1, 1, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 51, 25, +5, 1, 1, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 51, 25, +5, 1, 1, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 51, 25, +5, 1, 2, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 51, 25, +5, 1, 2, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 
51, 25, +5, 1, 2, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 51, 25, +5, 1, 3, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 51, 25, +5, 1, 3, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 51, 25, +5, 1, 3, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 51, 25, +5, 1, 4, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 51, 25, +5, 1, 4, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 51, 25, +5, 1, 4, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 51, 25, +5, 1, 5, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 51, 25, +5, 1, 5, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 51, 25, +5, 1, 5, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 51, 25, +5, 2, 1, 1, 1, 1, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 1, 1, 1, 1, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 1, 1, 1, 1, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 1, 2, 128, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 1, 2, 256, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 1, 2, 384, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 2, 1, 128, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 2, 1, 256, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 2, 1, 384, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 2, 2, 128, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 2, 2, 256, 77, 77, 77, 52,,, +5, 2, 1, 1, 2, 2, 2, 2, 384, 77, 77, 77, 52,,, +5, 2, 1, 2, 1, 1, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 1, 2, 1, 1, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 1, 2, 1, 1, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 1, 2, 128, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 1, 2, 256, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 1, 2, 384, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 2, 1, 128, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 2, 1, 256, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 2, 1, 384, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 2, 2, 128, 153, 153, 153, 103,,, +5, 2, 1, 2, 2, 2, 2, 2, 256, 153, 153, 153, 103,,, +5, 
2, 1, 2, 2, 2, 2, 2, 384, 153, 153, 153, 103,,, +5, 2, 2, 1, 1, 1, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 2, 1, 1, 1, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 2, 1, 1, 1, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 1, 2, 128, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 1, 2, 256, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 1, 2, 384, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 2, 1, 128, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 2, 1, 256, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 2, 1, 384, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 2, 2, 128, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 2, 2, 256, 77, 77, 77, 52,,, +5, 2, 2, 1, 2, 2, 2, 2, 384, 77, 77, 77, 52,,, +5, 2, 2, 2, 1, 1, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 2, 2, 1, 1, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 2, 2, 1, 1, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 1, 2, 128, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 1, 2, 256, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 1, 2, 384, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 2, 1, 128, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 2, 1, 256, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 2, 1, 384, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 2, 2, 128, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 2, 2, 256, 153, 153, 153, 103,,, +5, 2, 2, 2, 2, 2, 2, 2, 384, 153, 153, 153, 103,,, +5, 2, 3, 1, 1, 1, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 3, 1, 1, 1, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 3, 1, 1, 1, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 1, 2, 128, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 1, 2, 256, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 1, 2, 384, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 
2, 1, 128, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 2, 1, 256, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 2, 1, 384, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 2, 2, 128, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 2, 2, 256, 77, 77, 77, 52,,, +5, 2, 3, 1, 2, 2, 2, 2, 384, 77, 77, 77, 52,,, +5, 2, 3, 2, 1, 1, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 3, 2, 1, 1, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 3, 2, 1, 1, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 1, 2, 128, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 1, 2, 256, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 1, 2, 384, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 2, 1, 128, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 2, 1, 256, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 2, 1, 384, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 2, 2, 128, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 2, 2, 256, 153, 153, 153, 103,,, +5, 2, 3, 2, 2, 2, 2, 2, 384, 153, 153, 153, 103,,, +5, 2, 4, 1, 1, 1, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 4, 1, 1, 1, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 4, 1, 1, 1, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 1, 2, 128, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 1, 2, 256, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 1, 2, 384, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 2, 1, 128, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 2, 1, 256, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 2, 1, 384, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 2, 2, 128, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 2, 2, 256, 77, 77, 77, 52,,, +5, 2, 4, 1, 2, 2, 2, 2, 384, 77, 77, 77, 52,,, +5, 2, 4, 2, 1, 1, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 4, 2, 1, 1, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 4, 2, 1, 1, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 1, 
1, 256, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 1, 2, 128, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 1, 2, 256, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 1, 2, 384, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 2, 1, 128, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 2, 1, 256, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 2, 1, 384, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 2, 2, 128, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 2, 2, 256, 153, 153, 153, 103,,, +5, 2, 4, 2, 2, 2, 2, 2, 384, 153, 153, 153, 103,,, +5, 2, 5, 1, 1, 1, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 5, 1, 1, 1, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 5, 1, 1, 1, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 1, 1, 128, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 1, 1, 256, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 1, 1, 384, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 1, 2, 128, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 1, 2, 256, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 1, 2, 384, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 2, 1, 128, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 2, 1, 256, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 2, 1, 384, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 2, 2, 128, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 2, 2, 256, 77, 77, 77, 52,,, +5, 2, 5, 1, 2, 2, 2, 2, 384, 77, 77, 77, 52,,, +5, 2, 5, 2, 1, 1, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 5, 2, 1, 1, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 5, 2, 1, 1, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 1, 1, 128, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 1, 1, 256, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 1, 1, 384, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 1, 2, 128, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 1, 2, 256, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 1, 2, 384, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 2, 1, 128, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 2, 1, 256, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 2, 1, 384, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 2, 2, 128, 153, 153, 153, 103,,, +5, 2, 5, 2, 2, 2, 2, 2, 256, 153, 153, 153, 103,,, 
+5, 2, 5, 2, 2, 2, 2, 2, 384, 153, 153, 153, 103,,, +5, 3, 1, 1, 1, 1, 1, 1, 128, 78, 78, 78, 53,,, +5, 3, 1, 1, 1, 1, 1, 1, 256, 78, 78, 78, 53,,, +5, 3, 1, 1, 1, 1, 1, 1, 384, 78, 78, 78, 53,,, +5, 3, 1, 1, 2, 2, 1, 1, 128, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 1, 1, 256, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 1, 1, 384, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 1, 2, 128, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 1, 2, 256, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 1, 2, 384, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 2, 1, 128, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 2, 1, 256, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 2, 1, 384, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 2, 2, 128, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 2, 2, 256, 78, 78, 72, 50,,, +5, 3, 1, 1, 2, 2, 2, 2, 384, 78, 78, 72, 50,,, +5, 3, 1, 1, 3, 3, 1, 1, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 1, 1, 256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 1, 1, 384, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 1, 2, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 1, 2, 256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 1, 2, 384, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 1, 3, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 1, 3, 256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 1, 3, 384, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 1, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 1, 256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 1, 384, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 2, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 2, 256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 2, 384, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 3, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 3, 256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 2, 3, 384, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 1, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 1, 256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 1, 384, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 2, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 2, 256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 2, 384, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 3, 128, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 3, 
256, 78, 78, 69, 48,,, +5, 3, 1, 1, 3, 3, 3, 3, 384, 78, 78, 69, 48,,, +5, 3, 1, 2, 1, 1, 1, 1, 128, 154, 154, 154, 104,,, +5, 3, 1, 2, 1, 1, 1, 1, 256, 154, 154, 154, 104,,, +5, 3, 1, 2, 1, 1, 1, 1, 384, 154, 154, 154, 104,,, +5, 3, 1, 2, 2, 2, 1, 1, 128, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 1, 1, 256, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 1, 1, 384, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 1, 2, 128, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 1, 2, 256, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 1, 2, 384, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 2, 1, 128, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 2, 1, 256, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 2, 1, 384, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 2, 2, 128, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 2, 2, 256, 154, 154, 142, 98,,, +5, 3, 1, 2, 2, 2, 2, 2, 384, 154, 154, 142, 98,,, +5, 3, 1, 2, 3, 3, 1, 1, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 1, 1, 256, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 1, 1, 384, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 1, 2, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 1, 2, 256, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 1, 2, 384, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 1, 3, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 1, 3, 256, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 1, 3, 384, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 1, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 1, 256, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 1, 384, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 2, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 2, 256, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 2, 384, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 3, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 3, 256, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 2, 3, 384, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 1, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 1, 256, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 1, 384, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 2, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 2, 256, 154, 
154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 2, 384, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 3, 128, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 3, 256, 154, 154, 136, 94,,, +5, 3, 1, 2, 3, 3, 3, 3, 384, 154, 154, 136, 94,,, +5, 3, 1, 3, 1, 1, 1, 1, 128, 230, 230, 230, 155,,, +5, 3, 1, 3, 1, 1, 1, 1, 256, 230, 230, 230, 155,,, +5, 3, 1, 3, 1, 1, 1, 1, 384, 230, 230, 230, 155,,, +5, 3, 1, 3, 2, 2, 1, 1, 128, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 1, 1, 256, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 1, 1, 384, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 1, 2, 128, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 1, 2, 256, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 1, 2, 384, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 2, 1, 128, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 2, 1, 256, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 2, 1, 384, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 2, 2, 128, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 2, 2, 256, 230, 230, 212, 146,,, +5, 3, 1, 3, 2, 2, 2, 2, 384, 230, 230, 212, 146,,, +5, 3, 1, 3, 3, 3, 1, 1, 128, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 1, 1, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 1, 1, 384, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 1, 2, 128, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 1, 2, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 1, 2, 384, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 1, 3, 128, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 1, 3, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 1, 3, 384, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 1, 128, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 1, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 1, 384, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 2, 128, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 2, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 2, 384, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 3, 128, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 3, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 2, 3, 384, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 3, 1, 128, 230, 230, 203, 140,,, +5, 3, 1, 
3, 3, 3, 3, 1, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 3, 1, 384, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 3, 2, 128, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 3, 2, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 3, 2, 384, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 3, 3, 128, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 3, 3, 256, 230, 230, 203, 140,,, +5, 3, 1, 3, 3, 3, 3, 3, 384, 230, 230, 203, 140,,, +5, 3, 2, 1, 1, 1, 1, 1, 128, 78, 78, 78, 53,,, +5, 3, 2, 1, 1, 1, 1, 1, 256, 78, 78, 78, 53,,, +5, 3, 2, 1, 1, 1, 1, 1, 384, 78, 78, 78, 53,,, +5, 3, 2, 1, 2, 2, 1, 1, 128, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 1, 1, 256, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 1, 1, 384, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 1, 2, 128, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 1, 2, 256, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 1, 2, 384, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 2, 1, 128, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 2, 1, 256, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 2, 1, 384, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 2, 2, 128, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 2, 2, 256, 78, 78, 72, 50,,, +5, 3, 2, 1, 2, 2, 2, 2, 384, 78, 78, 72, 50,,, +5, 3, 2, 1, 3, 3, 1, 1, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 1, 1, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 1, 1, 384, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 1, 2, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 1, 2, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 1, 2, 384, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 1, 3, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 1, 3, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 1, 3, 384, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 1, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 1, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 1, 384, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 2, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 2, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 2, 384, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 3, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 3, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 2, 3, 384, 78, 78, 69, 48,,, +5, 3, 2, 
1, 3, 3, 3, 1, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 3, 1, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 3, 1, 384, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 3, 2, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 3, 2, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 3, 2, 384, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 3, 3, 128, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 3, 3, 256, 78, 78, 69, 48,,, +5, 3, 2, 1, 3, 3, 3, 3, 384, 78, 78, 69, 48,,, +5, 3, 2, 2, 1, 1, 1, 1, 128, 154, 154, 154, 104,,, +5, 3, 2, 2, 1, 1, 1, 1, 256, 154, 154, 154, 104,,, +5, 3, 2, 2, 1, 1, 1, 1, 384, 154, 154, 154, 104,,, +5, 3, 2, 2, 2, 2, 1, 1, 128, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 1, 1, 256, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 1, 1, 384, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 1, 2, 128, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 1, 2, 256, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 1, 2, 384, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 2, 1, 128, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 2, 1, 256, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 2, 1, 384, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 2, 2, 128, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 2, 2, 256, 154, 154, 142, 98,,, +5, 3, 2, 2, 2, 2, 2, 2, 384, 154, 154, 142, 98,,, +5, 3, 2, 2, 3, 3, 1, 1, 128, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 1, 1, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 1, 1, 384, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 1, 2, 128, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 1, 2, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 1, 2, 384, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 1, 3, 128, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 1, 3, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 1, 3, 384, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 2, 1, 128, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 2, 1, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 2, 1, 384, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 2, 2, 128, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 2, 2, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 2, 2, 384, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 2, 3, 128, 154, 154, 136, 
94,,, +5, 3, 2, 2, 3, 3, 2, 3, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 2, 3, 384, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 1, 128, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 1, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 1, 384, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 2, 128, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 2, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 2, 384, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 3, 128, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 3, 256, 154, 154, 136, 94,,, +5, 3, 2, 2, 3, 3, 3, 3, 384, 154, 154, 136, 94,,, +5, 3, 2, 3, 1, 1, 1, 1, 128, 230, 230, 230, 155,,, +5, 3, 2, 3, 1, 1, 1, 1, 256, 230, 230, 230, 155,,, +5, 3, 2, 3, 1, 1, 1, 1, 384, 230, 230, 230, 155,,, +5, 3, 2, 3, 2, 2, 1, 1, 128, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 1, 1, 256, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 1, 1, 384, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 1, 2, 128, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 1, 2, 256, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 1, 2, 384, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 2, 1, 128, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 2, 1, 256, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 2, 1, 384, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 2, 2, 128, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 2, 2, 256, 230, 230, 212, 146,,, +5, 3, 2, 3, 2, 2, 2, 2, 384, 230, 230, 212, 146,,, +5, 3, 2, 3, 3, 3, 1, 1, 128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 1, 1, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 1, 1, 384, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 1, 2, 128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 1, 2, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 1, 2, 384, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 1, 3, 128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 1, 3, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 1, 3, 384, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 1, 128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 1, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 1, 384, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 2, 
128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 2, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 2, 384, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 3, 128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 3, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 2, 3, 384, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 1, 128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 1, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 1, 384, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 2, 128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 2, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 2, 384, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 3, 128, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 3, 256, 230, 230, 203, 140,,, +5, 3, 2, 3, 3, 3, 3, 3, 384, 230, 230, 203, 140,,, +5, 3, 3, 1, 1, 1, 1, 1, 128, 78, 78, 78, 53,,, +5, 3, 3, 1, 1, 1, 1, 1, 256, 78, 78, 78, 53,,, +5, 3, 3, 1, 1, 1, 1, 1, 384, 78, 78, 78, 53,,, +5, 3, 3, 1, 2, 2, 1, 1, 128, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 1, 1, 256, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 1, 1, 384, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 1, 2, 128, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 1, 2, 256, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 1, 2, 384, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 2, 1, 128, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 2, 1, 256, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 2, 1, 384, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 2, 2, 128, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 2, 2, 256, 78, 78, 72, 50,,, +5, 3, 3, 1, 2, 2, 2, 2, 384, 78, 78, 72, 50,,, +5, 3, 3, 1, 3, 3, 1, 1, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 1, 1, 256, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 1, 1, 384, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 1, 2, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 1, 2, 256, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 1, 2, 384, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 1, 3, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 1, 3, 256, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 1, 3, 384, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 2, 1, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 2, 1, 256, 78, 78, 69, 
48,,, +5, 3, 3, 1, 3, 3, 2, 1, 384, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 2, 2, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 2, 2, 256, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 2, 2, 384, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 2, 3, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 2, 3, 256, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 2, 3, 384, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 1, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 1, 256, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 1, 384, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 2, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 2, 256, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 2, 384, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 3, 128, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 3, 256, 78, 78, 69, 48,,, +5, 3, 3, 1, 3, 3, 3, 3, 384, 78, 78, 69, 48,,, +5, 3, 3, 2, 1, 1, 1, 1, 128, 154, 154, 154, 104,,, +5, 3, 3, 2, 1, 1, 1, 1, 256, 154, 154, 154, 104,,, +5, 3, 3, 2, 1, 1, 1, 1, 384, 154, 154, 154, 104,,, +5, 3, 3, 2, 2, 2, 1, 1, 128, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 1, 1, 256, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 1, 1, 384, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 1, 2, 128, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 1, 2, 256, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 1, 2, 384, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 2, 1, 128, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 2, 1, 256, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 2, 1, 384, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 2, 2, 128, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 2, 2, 256, 154, 154, 142, 98,,, +5, 3, 3, 2, 2, 2, 2, 2, 384, 154, 154, 142, 98,,, +5, 3, 3, 2, 3, 3, 1, 1, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 1, 1, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 1, 1, 384, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 1, 2, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 1, 2, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 1, 2, 384, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 1, 3, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 1, 3, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 1, 3, 384, 154, 154, 136, 
94,,, +5, 3, 3, 2, 3, 3, 2, 1, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 2, 1, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 2, 1, 384, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 2, 2, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 2, 2, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 2, 2, 384, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 2, 3, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 2, 3, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 2, 3, 384, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 1, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 1, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 1, 384, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 2, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 2, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 2, 384, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 3, 128, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 3, 256, 154, 154, 136, 94,,, +5, 3, 3, 2, 3, 3, 3, 3, 384, 154, 154, 136, 94,,, +5, 3, 3, 3, 1, 1, 1, 1, 128, 230, 230, 230, 155,,, +5, 3, 3, 3, 1, 1, 1, 1, 256, 230, 230, 230, 155,,, +5, 3, 3, 3, 1, 1, 1, 1, 384, 230, 230, 230, 155,,, +5, 3, 3, 3, 2, 2, 1, 1, 128, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 1, 1, 256, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 1, 1, 384, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 1, 2, 128, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 1, 2, 256, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 1, 2, 384, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 2, 1, 128, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 2, 1, 256, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 2, 1, 384, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 2, 2, 128, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 2, 2, 256, 230, 230, 212, 146,,, +5, 3, 3, 3, 2, 2, 2, 2, 384, 230, 230, 212, 146,,, +5, 3, 3, 3, 3, 3, 1, 1, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 1, 1, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 1, 1, 384, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 1, 2, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 1, 2, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 1, 2, 384, 230, 
230, 203, 140,,, +5, 3, 3, 3, 3, 3, 1, 3, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 1, 3, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 1, 3, 384, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 1, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 1, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 1, 384, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 2, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 2, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 2, 384, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 3, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 3, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 2, 3, 384, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 1, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 1, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 1, 384, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 2, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 2, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 2, 384, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 3, 128, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 3, 256, 230, 230, 203, 140,,, +5, 3, 3, 3, 3, 3, 3, 3, 384, 230, 230, 203, 140,,, +5, 3, 4, 1, 1, 1, 1, 1, 128, 78, 78, 78, 53,,, +5, 3, 4, 1, 1, 1, 1, 1, 256, 78, 78, 78, 53,,, +5, 3, 4, 1, 1, 1, 1, 1, 384, 78, 78, 78, 53,,, +5, 3, 4, 1, 2, 2, 1, 1, 128, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 1, 1, 256, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 1, 1, 384, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 1, 2, 128, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 1, 2, 256, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 1, 2, 384, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 2, 1, 128, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 2, 1, 256, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 2, 1, 384, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 2, 2, 128, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 2, 2, 256, 78, 78, 72, 50,,, +5, 3, 4, 1, 2, 2, 2, 2, 384, 78, 78, 72, 50,,, +5, 3, 4, 1, 3, 3, 1, 1, 128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 1, 1, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 1, 1, 384, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 1, 2, 
128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 1, 2, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 1, 2, 384, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 1, 3, 128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 1, 3, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 1, 3, 384, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 1, 128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 1, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 1, 384, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 2, 128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 2, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 2, 384, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 3, 128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 3, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 2, 3, 384, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 1, 128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 1, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 1, 384, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 2, 128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 2, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 2, 384, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 3, 128, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 3, 256, 78, 78, 69, 48,,, +5, 3, 4, 1, 3, 3, 3, 3, 384, 78, 78, 69, 48,,, +5, 3, 4, 2, 1, 1, 1, 1, 128, 154, 154, 154, 104,,, +5, 3, 4, 2, 1, 1, 1, 1, 256, 154, 154, 154, 104,,, +5, 3, 4, 2, 1, 1, 1, 1, 384, 154, 154, 154, 104,,, +5, 3, 4, 2, 2, 2, 1, 1, 128, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 1, 1, 256, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 1, 1, 384, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 1, 2, 128, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 1, 2, 256, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 1, 2, 384, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 2, 1, 128, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 2, 1, 256, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 2, 1, 384, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 2, 2, 128, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 2, 2, 256, 154, 154, 142, 98,,, +5, 3, 4, 2, 2, 2, 2, 2, 384, 154, 154, 142, 98,,, +5, 3, 4, 2, 3, 3, 1, 1, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 1, 1, 256, 154, 154, 136, 94,,, 
+5, 3, 4, 2, 3, 3, 1, 1, 384, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 1, 2, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 1, 2, 256, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 1, 2, 384, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 1, 3, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 1, 3, 256, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 1, 3, 384, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 1, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 1, 256, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 1, 384, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 2, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 2, 256, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 2, 384, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 3, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 3, 256, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 2, 3, 384, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 1, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 1, 256, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 1, 384, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 2, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 2, 256, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 2, 384, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 3, 128, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 3, 256, 154, 154, 136, 94,,, +5, 3, 4, 2, 3, 3, 3, 3, 384, 154, 154, 136, 94,,, +5, 3, 4, 3, 1, 1, 1, 1, 128, 230, 230, 230, 155,,, +5, 3, 4, 3, 1, 1, 1, 1, 256, 230, 230, 230, 155,,, +5, 3, 4, 3, 1, 1, 1, 1, 384, 230, 230, 230, 155,,, +5, 3, 4, 3, 2, 2, 1, 1, 128, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 1, 1, 256, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 1, 1, 384, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 1, 2, 128, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 1, 2, 256, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 1, 2, 384, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 2, 1, 128, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 2, 1, 256, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 2, 1, 384, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 2, 2, 128, 230, 230, 212, 146,,, +5, 3, 4, 3, 2, 2, 2, 2, 256, 230, 230, 212, 
146,,, +5, 3, 4, 3, 2, 2, 2, 2, 384, 230, 230, 212, 146,,, +5, 3, 4, 3, 3, 3, 1, 1, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 1, 1, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 1, 1, 384, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 1, 2, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 1, 2, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 1, 2, 384, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 1, 3, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 1, 3, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 1, 3, 384, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 1, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 1, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 1, 384, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 2, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 2, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 2, 384, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 3, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 3, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 2, 3, 384, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 1, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 1, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 1, 384, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 2, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 2, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 2, 384, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 3, 128, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 3, 256, 230, 230, 203, 140,,, +5, 3, 4, 3, 3, 3, 3, 3, 384, 230, 230, 203, 140,,, +5, 3, 5, 1, 1, 1, 1, 1, 128, 78, 78, 78, 53,,, +5, 3, 5, 1, 1, 1, 1, 1, 256, 78, 78, 78, 53,,, +5, 3, 5, 1, 1, 1, 1, 1, 384, 78, 78, 78, 53,,, +5, 3, 5, 1, 2, 2, 1, 1, 128, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 1, 1, 256, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 1, 1, 384, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 1, 2, 128, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 1, 2, 256, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 1, 2, 384, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 2, 1, 128, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 2, 1, 256, 78, 78, 72, 50,,, +5, 3, 
5, 1, 2, 2, 2, 1, 384, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 2, 2, 128, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 2, 2, 256, 78, 78, 72, 50,,, +5, 3, 5, 1, 2, 2, 2, 2, 384, 78, 78, 72, 50,,, +5, 3, 5, 1, 3, 3, 1, 1, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 1, 1, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 1, 1, 384, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 1, 2, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 1, 2, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 1, 2, 384, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 1, 3, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 1, 3, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 1, 3, 384, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 1, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 1, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 1, 384, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 2, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 2, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 2, 384, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 3, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 3, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 2, 3, 384, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 1, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 1, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 1, 384, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 2, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 2, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 2, 384, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 3, 128, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 3, 256, 78, 78, 69, 48,,, +5, 3, 5, 1, 3, 3, 3, 3, 384, 78, 78, 69, 48,,, +5, 3, 5, 2, 1, 1, 1, 1, 128, 154, 154, 154, 104,,, +5, 3, 5, 2, 1, 1, 1, 1, 256, 154, 154, 154, 104,,, +5, 3, 5, 2, 1, 1, 1, 1, 384, 154, 154, 154, 104,,, +5, 3, 5, 2, 2, 2, 1, 1, 128, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 1, 1, 256, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 1, 1, 384, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 1, 2, 128, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 1, 2, 256, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 1, 2, 384, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 2, 1, 128, 154, 154, 142, 98,,, +5, 
3, 5, 2, 2, 2, 2, 1, 256, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 2, 1, 384, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 2, 2, 128, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 2, 2, 256, 154, 154, 142, 98,,, +5, 3, 5, 2, 2, 2, 2, 2, 384, 154, 154, 142, 98,,, +5, 3, 5, 2, 3, 3, 1, 1, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 1, 1, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 1, 1, 384, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 1, 2, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 1, 2, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 1, 2, 384, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 1, 3, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 1, 3, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 1, 3, 384, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 1, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 1, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 1, 384, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 2, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 2, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 2, 384, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 3, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 3, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 2, 3, 384, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 1, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 1, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 1, 384, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 2, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 2, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 2, 384, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 3, 128, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 3, 256, 154, 154, 136, 94,,, +5, 3, 5, 2, 3, 3, 3, 3, 384, 154, 154, 136, 94,,, +5, 3, 5, 3, 1, 1, 1, 1, 128, 230, 230, 230, 155,,, +5, 3, 5, 3, 1, 1, 1, 1, 256, 230, 230, 230, 155,,, +5, 3, 5, 3, 1, 1, 1, 1, 384, 230, 230, 230, 155,,, +5, 3, 5, 3, 2, 2, 1, 1, 128, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 1, 1, 256, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 1, 1, 384, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 1, 2, 128, 230, 230, 212, 146,,, +5, 3, 
5, 3, 2, 2, 1, 2, 256, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 1, 2, 384, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 2, 1, 128, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 2, 1, 256, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 2, 1, 384, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 2, 2, 128, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 2, 2, 256, 230, 230, 212, 146,,, +5, 3, 5, 3, 2, 2, 2, 2, 384, 230, 230, 212, 146,,, +5, 3, 5, 3, 3, 3, 1, 1, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 1, 1, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 1, 1, 384, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 1, 2, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 1, 2, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 1, 2, 384, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 1, 3, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 1, 3, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 1, 3, 384, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 1, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 1, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 1, 384, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 2, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 2, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 2, 384, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 3, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 3, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 2, 3, 384, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 1, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 1, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 1, 384, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 2, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 2, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 2, 384, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 3, 128, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 3, 256, 230, 230, 203, 140,,, +5, 3, 5, 3, 3, 3, 3, 3, 384, 230, 230, 203, 140,,, +5, 4, 1, 1, 1, 1, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 1, 1, 1, 1, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 1, 1, 1, 1, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 1, 1, 128, 79, 79, 54, 
28,,, +5, 4, 1, 1, 2, 2, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 1, 2, 128, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 1, 2, 256, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 1, 2, 384, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 2, 1, 128, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 2, 1, 256, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 2, 1, 384, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 2, 2, 128, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 2, 2, 256, 79, 79, 54, 28,,, +5, 4, 1, 1, 2, 2, 2, 2, 384, 79, 79, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 1, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 1, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 1, 384, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 2, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 2, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 2, 384, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 3, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 3, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 1, 3, 384, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 1, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 1, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 1, 384, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 2, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 2, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 2, 384, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 3, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 3, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 2, 3, 384, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 1, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 1, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 1, 384, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 2, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 2, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 2, 384, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 3, 128, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 3, 256, 79, 77, 54, 28,,, +5, 4, 1, 1, 3, 3, 3, 3, 384, 79, 77, 54, 28,,, +5, 4, 1, 2, 1, 1, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 1, 2, 1, 1, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 1, 2, 1, 1, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 
2, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 1, 2, 128, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 1, 2, 256, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 1, 2, 384, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 2, 1, 128, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 2, 1, 256, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 2, 1, 384, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 2, 2, 128, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 2, 2, 256, 155, 155, 105, 53,,, +5, 4, 1, 2, 2, 2, 2, 2, 384, 155, 155, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 1, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 1, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 1, 384, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 2, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 2, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 2, 384, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 3, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 3, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 1, 3, 384, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 1, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 1, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 1, 384, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 2, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 2, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 2, 384, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 3, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 3, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 2, 3, 384, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 1, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 1, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 1, 384, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 2, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 2, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 2, 384, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 3, 128, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 3, 256, 155, 151, 105, 53,,, +5, 4, 1, 2, 3, 3, 3, 3, 384, 155, 151, 105, 53,,, +5, 4, 1, 3, 1, 1, 1, 1, 
128, 231, 231, 156, 78,,, +5, 4, 1, 3, 1, 1, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 1, 3, 1, 1, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 1, 2, 128, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 1, 2, 256, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 1, 2, 384, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 2, 1, 128, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 2, 1, 256, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 2, 1, 384, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 2, 2, 128, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 2, 2, 256, 231, 231, 156, 78,,, +5, 4, 1, 3, 2, 2, 2, 2, 384, 231, 231, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 1, 128, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 1, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 1, 384, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 2, 128, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 2, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 2, 384, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 3, 128, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 3, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 1, 3, 384, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 1, 128, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 1, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 1, 384, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 2, 128, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 2, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 2, 384, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 3, 128, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 3, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 2, 3, 384, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 1, 128, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 1, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 1, 384, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 2, 128, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 2, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 2, 384, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 3, 128, 231, 
225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 3, 256, 231, 225, 156, 78,,, +5, 4, 1, 3, 3, 3, 3, 3, 384, 231, 225, 156, 78,,, +5, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 2, 2, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 3, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 3, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 1, 3, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 3, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 3, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 2, 3, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 3, 1, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 3, 1, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 3, 1, 384, 240, 240, 207, 103,,, +5, 4, 1, 
4, 3, 3, 3, 2, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 3, 2, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 3, 2, 384, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 3, 3, 128, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 3, 3, 256, 240, 240, 207, 103,,, +5, 4, 1, 4, 3, 3, 3, 3, 384, 240, 240, 207, 103,,, +5, 4, 2, 1, 1, 1, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 2, 1, 1, 1, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 2, 1, 1, 1, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 1, 2, 128, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 1, 2, 256, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 1, 2, 384, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 2, 1, 128, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 2, 1, 256, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 2, 1, 384, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 2, 2, 128, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 2, 2, 256, 79, 79, 54, 28,,, +5, 4, 2, 1, 2, 2, 2, 2, 384, 79, 79, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 1, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 1, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 1, 384, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 2, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 2, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 2, 384, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 3, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 3, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 1, 3, 384, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 1, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 1, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 1, 384, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 2, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 2, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 2, 384, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 3, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 3, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 2, 3, 384, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 3, 1, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 3, 1, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 
3, 3, 1, 384, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 3, 2, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 3, 2, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 3, 2, 384, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 3, 3, 128, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 3, 3, 256, 79, 77, 54, 28,,, +5, 4, 2, 1, 3, 3, 3, 3, 384, 79, 77, 54, 28,,, +5, 4, 2, 2, 1, 1, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 2, 2, 1, 1, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 2, 2, 1, 1, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 1, 2, 128, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 1, 2, 256, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 1, 2, 384, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 2, 1, 128, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 2, 1, 256, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 2, 1, 384, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 2, 2, 128, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 2, 2, 256, 155, 155, 105, 53,,, +5, 4, 2, 2, 2, 2, 2, 2, 384, 155, 155, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 1, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 1, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 1, 384, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 2, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 2, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 2, 384, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 3, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 3, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 1, 3, 384, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 1, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 1, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 1, 384, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 2, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 2, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 2, 384, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 3, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 3, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 2, 3, 384, 155, 151, 105, 
53,,, +5, 4, 2, 2, 3, 3, 3, 1, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 3, 1, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 3, 1, 384, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 3, 2, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 3, 2, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 3, 2, 384, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 3, 3, 128, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 3, 3, 256, 155, 151, 105, 53,,, +5, 4, 2, 2, 3, 3, 3, 3, 384, 155, 151, 105, 53,,, +5, 4, 2, 3, 1, 1, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 2, 3, 1, 1, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 2, 3, 1, 1, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 1, 2, 128, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 1, 2, 256, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 1, 2, 384, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 2, 1, 128, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 2, 1, 256, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 2, 1, 384, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 2, 2, 128, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 2, 2, 256, 231, 231, 156, 78,,, +5, 4, 2, 3, 2, 2, 2, 2, 384, 231, 231, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 1, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 1, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 1, 384, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 2, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 2, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 2, 384, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 3, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 3, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 1, 3, 384, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 2, 1, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 2, 1, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 2, 1, 384, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 2, 2, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 2, 2, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 2, 2, 384, 231, 225, 156, 78,,, +5, 
4, 2, 3, 3, 3, 2, 3, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 2, 3, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 2, 3, 384, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 1, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 1, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 1, 384, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 2, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 2, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 2, 384, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 3, 128, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 3, 256, 231, 225, 156, 78,,, +5, 4, 2, 3, 3, 3, 3, 3, 384, 231, 225, 156, 78,,, +5, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 2, 2, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 3, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 3, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 1, 3, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 2, 1, 384, 240, 240, 
207, 103,,, +5, 4, 2, 4, 3, 3, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 2, 3, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 2, 3, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 2, 3, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 1, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 1, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 1, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 2, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 2, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 2, 384, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 3, 128, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 3, 256, 240, 240, 207, 103,,, +5, 4, 2, 4, 3, 3, 3, 3, 384, 240, 240, 207, 103,,, +5, 4, 3, 1, 1, 1, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 3, 1, 1, 1, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 3, 1, 1, 1, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 1, 2, 128, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 1, 2, 256, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 1, 2, 384, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 2, 1, 128, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 2, 1, 256, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 2, 1, 384, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 2, 2, 128, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 2, 2, 256, 79, 79, 54, 28,,, +5, 4, 3, 1, 2, 2, 2, 2, 384, 79, 79, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 1, 128, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 1, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 1, 384, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 2, 128, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 2, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 2, 384, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 3, 128, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 3, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 1, 3, 384, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 2, 1, 128, 79, 77, 54, 28,,, +5, 4, 
3, 1, 3, 3, 2, 1, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 2, 1, 384, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 2, 2, 128, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 2, 2, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 2, 2, 384, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 2, 3, 128, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 2, 3, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 2, 3, 384, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 1, 128, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 1, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 1, 384, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 2, 128, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 2, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 2, 384, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 3, 128, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 3, 256, 79, 77, 54, 28,,, +5, 4, 3, 1, 3, 3, 3, 3, 384, 79, 77, 54, 28,,, +5, 4, 3, 2, 1, 1, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 3, 2, 1, 1, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 3, 2, 1, 1, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 1, 2, 128, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 1, 2, 256, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 1, 2, 384, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 2, 1, 128, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 2, 1, 256, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 2, 1, 384, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 2, 2, 128, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 2, 2, 256, 155, 155, 105, 53,,, +5, 4, 3, 2, 2, 2, 2, 2, 384, 155, 155, 105, 53,,, +5, 4, 3, 2, 3, 3, 1, 1, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 1, 1, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 1, 1, 384, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 1, 2, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 1, 2, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 1, 2, 384, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 1, 3, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 1, 3, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 
3, 1, 3, 384, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 1, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 1, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 1, 384, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 2, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 2, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 2, 384, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 3, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 3, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 2, 3, 384, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 1, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 1, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 1, 384, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 2, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 2, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 2, 384, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 3, 128, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 3, 256, 155, 151, 105, 53,,, +5, 4, 3, 2, 3, 3, 3, 3, 384, 155, 151, 105, 53,,, +5, 4, 3, 3, 1, 1, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 3, 3, 1, 1, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 3, 3, 1, 1, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 1, 2, 128, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 1, 2, 256, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 1, 2, 384, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 2, 1, 128, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 2, 1, 256, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 2, 1, 384, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 2, 2, 128, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 2, 2, 256, 231, 231, 156, 78,,, +5, 4, 3, 3, 2, 2, 2, 2, 384, 231, 231, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 1, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 1, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 1, 384, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 2, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 2, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 2, 
384, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 3, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 3, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 1, 3, 384, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 1, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 1, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 1, 384, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 2, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 2, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 2, 384, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 3, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 3, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 2, 3, 384, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 1, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 1, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 1, 384, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 2, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 2, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 2, 384, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 3, 128, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 3, 256, 231, 225, 156, 78,,, +5, 4, 3, 3, 3, 3, 3, 3, 384, 231, 225, 156, 78,,, +5, 4, 3, 4, 1, 1, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 1, 1, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 1, 1, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 2, 2, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 
1, 1, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 1, 3, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 1, 3, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 1, 3, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 3, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 3, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 2, 3, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 1, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 1, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 1, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 2, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 2, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 2, 384, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 3, 128, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 3, 256, 240, 240, 207, 103,,, +5, 4, 3, 4, 3, 3, 3, 3, 384, 240, 240, 207, 103,,, +5, 4, 4, 1, 1, 1, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 4, 1, 1, 1, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 4, 1, 1, 1, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 1, 2, 128, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 1, 2, 256, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 1, 2, 384, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 2, 1, 128, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 2, 1, 256, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 2, 1, 384, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 2, 2, 128, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 2, 2, 256, 79, 79, 54, 28,,, +5, 4, 4, 1, 2, 2, 2, 2, 384, 79, 79, 54, 
28,,, +5, 4, 4, 1, 3, 3, 1, 1, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 1, 1, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 1, 1, 384, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 1, 2, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 1, 2, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 1, 2, 384, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 1, 3, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 1, 3, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 1, 3, 384, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 1, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 1, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 1, 384, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 2, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 2, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 2, 384, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 3, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 3, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 2, 3, 384, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 1, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 1, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 1, 384, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 2, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 2, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 2, 384, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 3, 128, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 3, 256, 79, 77, 54, 28,,, +5, 4, 4, 1, 3, 3, 3, 3, 384, 79, 77, 54, 28,,, +5, 4, 4, 2, 1, 1, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 4, 2, 1, 1, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 4, 2, 1, 1, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 1, 2, 128, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 1, 2, 256, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 1, 2, 384, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 2, 1, 128, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 2, 1, 256, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 2, 1, 384, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 2, 2, 128, 155, 155, 105, 53,,, +5, 4, 4, 2, 2, 2, 2, 2, 256, 155, 
155, 105, 53,,, +5, 4, 4, 2, 2, 2, 2, 2, 384, 155, 155, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 1, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 1, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 1, 384, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 2, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 2, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 2, 384, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 3, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 3, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 1, 3, 384, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 1, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 1, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 1, 384, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 2, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 2, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 2, 384, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 3, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 3, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 2, 3, 384, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 1, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 1, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 1, 384, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 2, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 2, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 2, 384, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 3, 128, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 3, 256, 155, 151, 105, 53,,, +5, 4, 4, 2, 3, 3, 3, 3, 384, 155, 151, 105, 53,,, +5, 4, 4, 3, 1, 1, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 4, 3, 1, 1, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 4, 3, 1, 1, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 1, 2, 128, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 1, 2, 256, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 1, 2, 384, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 2, 1, 128, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 2, 1, 256, 231, 231, 156, 
78,,, +5, 4, 4, 3, 2, 2, 2, 1, 384, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 2, 2, 128, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 2, 2, 256, 231, 231, 156, 78,,, +5, 4, 4, 3, 2, 2, 2, 2, 384, 231, 231, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 1, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 1, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 1, 384, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 2, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 2, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 2, 384, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 3, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 3, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 1, 3, 384, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 1, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 1, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 1, 384, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 2, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 2, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 2, 384, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 3, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 3, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 2, 3, 384, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 1, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 1, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 1, 384, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 2, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 2, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 2, 384, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 3, 128, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 3, 256, 231, 225, 156, 78,,, +5, 4, 4, 3, 3, 3, 3, 3, 384, 231, 225, 156, 78,,, +5, 4, 4, 4, 1, 1, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 1, 1, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 1, 1, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 1, 2, 256, 240, 240, 207, 
103,,, +5, 4, 4, 4, 2, 2, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 2, 2, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 3, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 3, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 1, 3, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 3, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 3, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 2, 3, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 1, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 1, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 1, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 2, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 2, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 2, 384, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 3, 128, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 3, 256, 240, 240, 207, 103,,, +5, 4, 4, 4, 3, 3, 3, 3, 384, 240, 240, 207, 103,,, +5, 4, 5, 1, 1, 1, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 5, 1, 1, 1, 1, 1, 256, 79, 79, 54, 28,,, +5, 4, 5, 1, 1, 1, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 1, 1, 128, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 1, 1, 256, 
79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 1, 1, 384, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 1, 2, 128, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 1, 2, 256, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 1, 2, 384, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 2, 1, 128, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 2, 1, 256, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 2, 1, 384, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 2, 2, 128, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 2, 2, 256, 79, 79, 54, 28,,, +5, 4, 5, 1, 2, 2, 2, 2, 384, 79, 79, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 1, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 1, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 1, 384, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 2, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 2, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 2, 384, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 3, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 3, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 1, 3, 384, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 1, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 1, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 1, 384, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 2, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 2, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 2, 384, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 3, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 3, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 2, 3, 384, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 1, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 1, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 1, 384, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 2, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 2, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 2, 384, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 3, 128, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 3, 256, 79, 77, 54, 28,,, +5, 4, 5, 1, 3, 3, 3, 3, 384, 79, 77, 54, 28,,, +5, 4, 5, 2, 1, 1, 1, 1, 128, 155, 155, 105, 53,,, +5, 4, 5, 2, 1, 1, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 5, 2, 1, 1, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 1, 1, 128, 155, 155, 105, 53,,, 
+5, 4, 5, 2, 2, 2, 1, 1, 256, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 1, 1, 384, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 1, 2, 128, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 1, 2, 256, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 1, 2, 384, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 2, 1, 128, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 2, 1, 256, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 2, 1, 384, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 2, 2, 128, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 2, 2, 256, 155, 155, 105, 53,,, +5, 4, 5, 2, 2, 2, 2, 2, 384, 155, 155, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 1, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 1, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 1, 384, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 2, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 2, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 2, 384, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 3, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 3, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 1, 3, 384, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 1, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 1, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 1, 384, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 2, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 2, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 2, 384, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 3, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 3, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 2, 3, 384, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 1, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 1, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 1, 384, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 2, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 2, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 2, 384, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 3, 128, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 3, 256, 155, 151, 105, 53,,, +5, 4, 5, 2, 3, 3, 3, 3, 384, 155, 151, 105, 53,,, +5, 4, 5, 3, 1, 1, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 5, 
3, 1, 1, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 5, 3, 1, 1, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 1, 1, 128, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 1, 1, 256, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 1, 1, 384, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 1, 2, 128, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 1, 2, 256, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 1, 2, 384, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 2, 1, 128, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 2, 1, 256, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 2, 1, 384, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 2, 2, 128, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 2, 2, 256, 231, 231, 156, 78,,, +5, 4, 5, 3, 2, 2, 2, 2, 384, 231, 231, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 1, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 1, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 1, 384, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 2, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 2, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 2, 384, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 3, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 3, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 1, 3, 384, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 1, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 1, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 1, 384, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 2, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 2, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 2, 384, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 3, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 3, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 2, 3, 384, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 3, 1, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 3, 1, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 3, 1, 384, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 3, 2, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 3, 2, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 3, 2, 384, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 3, 3, 128, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 
3, 3, 256, 231, 225, 156, 78,,, +5, 4, 5, 3, 3, 3, 3, 3, 384, 231, 225, 156, 78,,, +5, 4, 5, 4, 1, 1, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 1, 1, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 1, 1, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 2, 2, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 1, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 1, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 1, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 2, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 2, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 2, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 3, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 3, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 1, 3, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 1, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 1, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 1, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 2, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 2, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 2, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 3, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 3, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 2, 3, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 3, 1, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 3, 1, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 3, 1, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 3, 2, 128, 240, 240, 207, 
103,,, +5, 4, 5, 4, 3, 3, 3, 2, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 3, 2, 384, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 3, 3, 128, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 3, 3, 256, 240, 240, 207, 103,,, +5, 4, 5, 4, 3, 3, 3, 3, 384, 240, 240, 207, 103,,, +5, 5, 1, 1, 1, 1, 1, 1, 128, 80, 80, 55,,,, +5, 5, 1, 1, 1, 1, 1, 1, 256, 80, 80, 55,,,, +5, 5, 1, 1, 1, 1, 1, 1, 384, 80, 80, 55,,,, +5, 5, 1, 1, 2, 2, 1, 1, 128, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 1, 1, 256, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 1, 1, 384, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 1, 2, 128, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 1, 2, 256, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 1, 2, 384, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 2, 1, 128, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 2, 1, 256, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 2, 1, 384, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 2, 2, 128, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 2, 2, 256, 80, 78, 55,,,, +5, 5, 1, 1, 2, 2, 2, 2, 384, 80, 78, 55,,,, +5, 5, 1, 1, 3, 3, 1, 1, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 1, 1, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 1, 1, 384, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 1, 2, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 1, 2, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 1, 2, 384, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 1, 3, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 1, 3, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 1, 3, 384, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 1, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 1, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 1, 384, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 2, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 2, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 2, 384, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 3, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 3, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 2, 3, 384, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 1, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 1, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 1, 384, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 2, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 2, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 
2, 384, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 3, 128, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 3, 256, 80, 75, 55,,,, +5, 5, 1, 1, 3, 3, 3, 3, 384, 80, 75, 55,,,, +5, 5, 1, 2, 1, 1, 1, 1, 128, 156, 156, 106,,,, +5, 5, 1, 2, 1, 1, 1, 1, 256, 156, 156, 106,,,, +5, 5, 1, 2, 1, 1, 1, 1, 384, 156, 156, 106,,,, +5, 5, 1, 2, 2, 2, 1, 1, 128, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 1, 1, 256, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 1, 1, 384, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 1, 2, 128, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 1, 2, 256, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 1, 2, 384, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 2, 1, 128, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 2, 1, 256, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 2, 1, 384, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 2, 2, 128, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 2, 2, 256, 156, 152, 106,,,, +5, 5, 1, 2, 2, 2, 2, 2, 384, 156, 152, 106,,,, +5, 5, 1, 2, 3, 3, 1, 1, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 1, 1, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 1, 1, 384, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 1, 2, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 1, 2, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 1, 2, 384, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 1, 3, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 1, 3, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 1, 3, 384, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 1, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 1, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 1, 384, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 2, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 2, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 2, 384, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 3, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 3, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 2, 3, 384, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 3, 1, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 3, 1, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 3, 1, 384, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 3, 2, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 3, 2, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 
3, 3, 2, 384, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 3, 3, 128, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 3, 3, 256, 156, 146, 106,,,, +5, 5, 1, 2, 3, 3, 3, 3, 384, 156, 146, 106,,,, +5, 5, 1, 3, 1, 1, 1, 1, 128, 232, 232, 157,,,, +5, 5, 1, 3, 1, 1, 1, 1, 256, 232, 232, 157,,,, +5, 5, 1, 3, 1, 1, 1, 1, 384, 232, 232, 157,,,, +5, 5, 1, 3, 2, 2, 1, 1, 128, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 1, 1, 256, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 1, 1, 384, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 1, 2, 128, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 1, 2, 256, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 1, 2, 384, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 2, 1, 128, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 2, 1, 256, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 2, 1, 384, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 2, 2, 128, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 2, 2, 256, 232, 226, 157,,,, +5, 5, 1, 3, 2, 2, 2, 2, 384, 232, 226, 157,,,, +5, 5, 1, 3, 3, 3, 1, 1, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 1, 1, 256, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 1, 1, 384, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 1, 2, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 1, 2, 256, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 1, 2, 384, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 1, 3, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 1, 3, 256, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 1, 3, 384, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 1, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 1, 256, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 1, 384, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 2, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 2, 256, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 2, 384, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 3, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 3, 256, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 2, 3, 384, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 3, 1, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 3, 1, 256, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 3, 1, 384, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 3, 2, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 3, 2, 256, 232, 217, 157,,,, 
+5, 5, 1, 3, 3, 3, 3, 2, 384, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 3, 3, 128, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 3, 3, 256, 232, 217, 157,,,, +5, 5, 1, 3, 3, 3, 3, 3, 384, 232, 217, 157,,,, +5, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 208,,,, +5, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 208,,,, +5, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 1, 1, 128, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 1, 1, 256, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 1, 1, 384, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 1, 2, 128, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 1, 2, 256, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 1, 2, 384, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 2, 1, 128, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 2, 1, 256, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 2, 1, 384, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 2, 2, 128, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 2, 2, 256, 240, 240, 208,,,, +5, 5, 1, 4, 2, 2, 2, 2, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 1, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 1, 256, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 1, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 2, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 2, 256, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 2, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 3, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 3, 256, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 1, 3, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 1, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 1, 256, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 1, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 2, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 2, 256, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 2, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 3, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 3, 256, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 2, 3, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 1, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 1, 256, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 1, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 2, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 2, 256, 
240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 2, 384, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 3, 128, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 3, 256, 240, 240, 208,,,, +5, 5, 1, 4, 3, 3, 3, 3, 384, 240, 240, 208,,,, +5, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 5, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 5, 1, 5, 
3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 5, 1, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 5, 2, 1, 1, 1, 1, 1, 128, 80, 80, 55,,,, +5, 5, 2, 1, 1, 1, 1, 1, 256, 80, 80, 55,,,, +5, 5, 2, 1, 1, 1, 1, 1, 384, 80, 80, 55,,,, +5, 5, 2, 1, 2, 2, 1, 1, 128, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 1, 1, 256, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 1, 1, 384, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 1, 2, 128, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 1, 2, 256, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 1, 2, 384, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 2, 1, 128, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 2, 1, 256, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 2, 1, 384, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 2, 2, 128, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 2, 2, 256, 80, 78, 55,,,, +5, 5, 2, 1, 2, 2, 2, 2, 384, 80, 78, 55,,,, +5, 5, 2, 1, 3, 3, 1, 1, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 1, 1, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 1, 1, 384, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 1, 2, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 1, 2, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 1, 2, 384, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 1, 3, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 1, 3, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 1, 3, 384, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 1, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 1, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 1, 384, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 2, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 2, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 2, 384, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 3, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 3, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 2, 3, 384, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 3, 1, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 3, 1, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 3, 1, 384, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 3, 2, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 3, 2, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 3, 2, 384, 80, 75, 55,,,, +5, 5, 2, 1, 3, 
3, 3, 3, 128, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 3, 3, 256, 80, 75, 55,,,, +5, 5, 2, 1, 3, 3, 3, 3, 384, 80, 75, 55,,,, +5, 5, 2, 2, 1, 1, 1, 1, 128, 156, 156, 106,,,, +5, 5, 2, 2, 1, 1, 1, 1, 256, 156, 156, 106,,,, +5, 5, 2, 2, 1, 1, 1, 1, 384, 156, 156, 106,,,, +5, 5, 2, 2, 2, 2, 1, 1, 128, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 1, 1, 256, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 1, 1, 384, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 1, 2, 128, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 1, 2, 256, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 1, 2, 384, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 2, 1, 128, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 2, 1, 256, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 2, 1, 384, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 2, 2, 128, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 2, 2, 256, 156, 152, 106,,,, +5, 5, 2, 2, 2, 2, 2, 2, 384, 156, 152, 106,,,, +5, 5, 2, 2, 3, 3, 1, 1, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 1, 1, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 1, 1, 384, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 1, 2, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 1, 2, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 1, 2, 384, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 1, 3, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 1, 3, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 1, 3, 384, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 1, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 1, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 1, 384, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 2, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 2, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 2, 384, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 3, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 3, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 2, 3, 384, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 3, 1, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 3, 1, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 3, 1, 384, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 3, 2, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 3, 2, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 3, 2, 384, 156, 146, 106,,,, +5, 5, 
2, 2, 3, 3, 3, 3, 128, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 3, 3, 256, 156, 146, 106,,,, +5, 5, 2, 2, 3, 3, 3, 3, 384, 156, 146, 106,,,, +5, 5, 2, 3, 1, 1, 1, 1, 128, 232, 232, 157,,,, +5, 5, 2, 3, 1, 1, 1, 1, 256, 232, 232, 157,,,, +5, 5, 2, 3, 1, 1, 1, 1, 384, 232, 232, 157,,,, +5, 5, 2, 3, 2, 2, 1, 1, 128, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 1, 1, 256, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 1, 1, 384, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 1, 2, 128, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 1, 2, 256, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 1, 2, 384, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 2, 1, 128, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 2, 1, 256, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 2, 1, 384, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 2, 2, 128, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 2, 2, 256, 232, 226, 157,,,, +5, 5, 2, 3, 2, 2, 2, 2, 384, 232, 226, 157,,,, +5, 5, 2, 3, 3, 3, 1, 1, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 1, 1, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 1, 1, 384, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 1, 2, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 1, 2, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 1, 2, 384, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 1, 3, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 1, 3, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 1, 3, 384, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 1, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 1, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 1, 384, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 2, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 2, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 2, 384, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 3, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 3, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 2, 3, 384, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 1, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 1, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 1, 384, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 2, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 2, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 2, 384, 232, 
217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 3, 128, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 3, 256, 232, 217, 157,,,, +5, 5, 2, 3, 3, 3, 3, 3, 384, 232, 217, 157,,,, +5, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 208,,,, +5, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 208,,,, +5, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 1, 1, 128, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 1, 1, 256, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 1, 1, 384, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 1, 2, 128, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 1, 2, 256, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 1, 2, 384, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 2, 1, 128, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 2, 1, 256, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 2, 1, 384, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 2, 2, 128, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 2, 2, 256, 240, 240, 208,,,, +5, 5, 2, 4, 2, 2, 2, 2, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 1, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 1, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 1, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 2, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 2, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 2, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 3, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 3, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 1, 3, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 1, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 1, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 1, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 2, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 2, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 2, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 3, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 3, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 2, 3, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 3, 1, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 3, 1, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 3, 1, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 3, 2, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 3, 2, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 
3, 2, 384, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 3, 3, 128, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 3, 3, 256, 240, 240, 208,,,, +5, 5, 2, 4, 3, 3, 3, 3, 384, 240, 240, 208,,,, +5, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 5, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, 
+5, 5, 2, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 5, 2, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 5, 3, 1, 1, 1, 1, 1, 128, 80, 80, 55,,,, +5, 5, 3, 1, 1, 1, 1, 1, 256, 80, 80, 55,,,, +5, 5, 3, 1, 1, 1, 1, 1, 384, 80, 80, 55,,,, +5, 5, 3, 1, 2, 2, 1, 1, 128, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 1, 1, 256, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 1, 1, 384, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 1, 2, 128, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 1, 2, 256, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 1, 2, 384, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 2, 1, 128, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 2, 1, 256, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 2, 1, 384, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 2, 2, 128, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 2, 2, 256, 80, 78, 55,,,, +5, 5, 3, 1, 2, 2, 2, 2, 384, 80, 78, 55,,,, +5, 5, 3, 1, 3, 3, 1, 1, 128, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 1, 1, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 1, 1, 384, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 1, 2, 128, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 1, 2, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 1, 2, 384, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 1, 3, 128, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 1, 3, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 1, 3, 384, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 1, 128, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 1, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 1, 384, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 2, 128, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 2, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 2, 384, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 3, 128, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 3, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 2, 3, 384, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 3, 1, 128, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 3, 1, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 3, 1, 384, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 3, 2, 128, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 3, 2, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 3, 2, 384, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 3, 3, 128, 80, 75, 55,,,, +5, 5, 
3, 1, 3, 3, 3, 3, 256, 80, 75, 55,,,, +5, 5, 3, 1, 3, 3, 3, 3, 384, 80, 75, 55,,,, +5, 5, 3, 2, 1, 1, 1, 1, 128, 156, 156, 106,,,, +5, 5, 3, 2, 1, 1, 1, 1, 256, 156, 156, 106,,,, +5, 5, 3, 2, 1, 1, 1, 1, 384, 156, 156, 106,,,, +5, 5, 3, 2, 2, 2, 1, 1, 128, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 1, 1, 256, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 1, 1, 384, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 1, 2, 128, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 1, 2, 256, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 1, 2, 384, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 2, 1, 128, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 2, 1, 256, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 2, 1, 384, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 2, 2, 128, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 2, 2, 256, 156, 152, 106,,,, +5, 5, 3, 2, 2, 2, 2, 2, 384, 156, 152, 106,,,, +5, 5, 3, 2, 3, 3, 1, 1, 128, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 1, 1, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 1, 1, 384, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 1, 2, 128, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 1, 2, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 1, 2, 384, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 1, 3, 128, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 1, 3, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 1, 3, 384, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 1, 128, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 1, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 1, 384, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 2, 128, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 2, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 2, 384, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 3, 128, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 3, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 2, 3, 384, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 3, 1, 128, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 3, 1, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 3, 1, 384, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 3, 2, 128, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 3, 2, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 3, 2, 384, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 3, 3, 128, 156, 146, 
106,,,, +5, 5, 3, 2, 3, 3, 3, 3, 256, 156, 146, 106,,,, +5, 5, 3, 2, 3, 3, 3, 3, 384, 156, 146, 106,,,, +5, 5, 3, 3, 1, 1, 1, 1, 128, 232, 232, 157,,,, +5, 5, 3, 3, 1, 1, 1, 1, 256, 232, 232, 157,,,, +5, 5, 3, 3, 1, 1, 1, 1, 384, 232, 232, 157,,,, +5, 5, 3, 3, 2, 2, 1, 1, 128, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 1, 1, 256, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 1, 1, 384, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 1, 2, 128, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 1, 2, 256, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 1, 2, 384, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 2, 1, 128, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 2, 1, 256, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 2, 1, 384, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 2, 2, 128, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 2, 2, 256, 232, 226, 157,,,, +5, 5, 3, 3, 2, 2, 2, 2, 384, 232, 226, 157,,,, +5, 5, 3, 3, 3, 3, 1, 1, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 1, 1, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 1, 1, 384, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 1, 2, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 1, 2, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 1, 2, 384, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 1, 3, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 1, 3, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 1, 3, 384, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 1, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 1, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 1, 384, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 2, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 2, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 2, 384, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 3, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 3, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 2, 3, 384, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 1, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 1, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 1, 384, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 2, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 2, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 2, 384, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 
3, 128, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 3, 256, 232, 217, 157,,,, +5, 5, 3, 3, 3, 3, 3, 3, 384, 232, 217, 157,,,, +5, 5, 3, 4, 1, 1, 1, 1, 128, 240, 240, 208,,,, +5, 5, 3, 4, 1, 1, 1, 1, 256, 240, 240, 208,,,, +5, 5, 3, 4, 1, 1, 1, 1, 384, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 1, 1, 128, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 1, 1, 256, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 1, 1, 384, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 1, 2, 128, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 1, 2, 256, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 1, 2, 384, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 2, 1, 128, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 2, 1, 256, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 2, 1, 384, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 2, 2, 128, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 2, 2, 256, 240, 240, 208,,,, +5, 5, 3, 4, 2, 2, 2, 2, 384, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 1, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 1, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 1, 384, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 2, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 2, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 2, 384, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 3, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 3, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 1, 3, 384, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 1, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 1, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 1, 384, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 2, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 2, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 2, 384, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 3, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 3, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 2, 3, 384, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 3, 1, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 3, 1, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 3, 1, 384, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 3, 2, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 3, 2, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 3, 2, 384, 240, 240, 208,,,, +5, 
5, 3, 4, 3, 3, 3, 3, 128, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 3, 3, 256, 240, 240, 208,,,, +5, 5, 3, 4, 3, 3, 3, 3, 384, 240, 240, 208,,,, +5, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 5, 3, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 2, 384, 240, 
240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 5, 3, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 5, 4, 1, 1, 1, 1, 1, 128, 80, 80, 55,,,, +5, 5, 4, 1, 1, 1, 1, 1, 256, 80, 80, 55,,,, +5, 5, 4, 1, 1, 1, 1, 1, 384, 80, 80, 55,,,, +5, 5, 4, 1, 2, 2, 1, 1, 128, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 1, 1, 256, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 1, 1, 384, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 1, 2, 128, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 1, 2, 256, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 1, 2, 384, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 2, 1, 128, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 2, 1, 256, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 2, 1, 384, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 2, 2, 128, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 2, 2, 256, 80, 78, 55,,,, +5, 5, 4, 1, 2, 2, 2, 2, 384, 80, 78, 55,,,, +5, 5, 4, 1, 3, 3, 1, 1, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 1, 1, 256, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 1, 1, 384, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 1, 2, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 1, 2, 256, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 1, 2, 384, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 1, 3, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 1, 3, 256, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 1, 3, 384, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 1, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 1, 256, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 1, 384, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 2, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 2, 256, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 2, 384, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 3, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 3, 256, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 2, 3, 384, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 3, 1, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 3, 1, 256, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 3, 1, 384, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 3, 2, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 3, 2, 256, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 3, 2, 384, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 3, 3, 128, 80, 75, 55,,,, +5, 5, 4, 1, 3, 3, 3, 3, 256, 80, 75, 
55,,,, +5, 5, 4, 1, 3, 3, 3, 3, 384, 80, 75, 55,,,, +5, 5, 4, 2, 1, 1, 1, 1, 128, 156, 156, 106,,,, +5, 5, 4, 2, 1, 1, 1, 1, 256, 156, 156, 106,,,, +5, 5, 4, 2, 1, 1, 1, 1, 384, 156, 156, 106,,,, +5, 5, 4, 2, 2, 2, 1, 1, 128, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 1, 1, 256, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 1, 1, 384, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 1, 2, 128, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 1, 2, 256, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 1, 2, 384, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 2, 1, 128, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 2, 1, 256, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 2, 1, 384, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 2, 2, 128, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 2, 2, 256, 156, 152, 106,,,, +5, 5, 4, 2, 2, 2, 2, 2, 384, 156, 152, 106,,,, +5, 5, 4, 2, 3, 3, 1, 1, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 1, 1, 256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 1, 1, 384, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 1, 2, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 1, 2, 256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 1, 2, 384, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 1, 3, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 1, 3, 256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 1, 3, 384, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 1, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 1, 256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 1, 384, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 2, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 2, 256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 2, 384, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 3, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 3, 256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 2, 3, 384, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 1, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 1, 256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 1, 384, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 2, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 2, 256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 2, 384, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 3, 128, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 3, 
256, 156, 146, 106,,,, +5, 5, 4, 2, 3, 3, 3, 3, 384, 156, 146, 106,,,, +5, 5, 4, 3, 1, 1, 1, 1, 128, 232, 232, 157,,,, +5, 5, 4, 3, 1, 1, 1, 1, 256, 232, 232, 157,,,, +5, 5, 4, 3, 1, 1, 1, 1, 384, 232, 232, 157,,,, +5, 5, 4, 3, 2, 2, 1, 1, 128, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 1, 1, 256, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 1, 1, 384, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 1, 2, 128, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 1, 2, 256, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 1, 2, 384, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 2, 1, 128, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 2, 1, 256, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 2, 1, 384, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 2, 2, 128, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 2, 2, 256, 232, 226, 157,,,, +5, 5, 4, 3, 2, 2, 2, 2, 384, 232, 226, 157,,,, +5, 5, 4, 3, 3, 3, 1, 1, 128, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 1, 1, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 1, 1, 384, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 1, 2, 128, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 1, 2, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 1, 2, 384, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 1, 3, 128, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 1, 3, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 1, 3, 384, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 1, 128, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 1, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 1, 384, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 2, 128, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 2, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 2, 384, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 3, 128, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 3, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 2, 3, 384, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 3, 1, 128, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 3, 1, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 3, 1, 384, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 3, 2, 128, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 3, 2, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 3, 2, 384, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 3, 3, 128, 232, 217, 157,,,, +5, 5, 
4, 3, 3, 3, 3, 3, 256, 232, 217, 157,,,, +5, 5, 4, 3, 3, 3, 3, 3, 384, 232, 217, 157,,,, +5, 5, 4, 4, 1, 1, 1, 1, 128, 240, 240, 208,,,, +5, 5, 4, 4, 1, 1, 1, 1, 256, 240, 240, 208,,,, +5, 5, 4, 4, 1, 1, 1, 1, 384, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 1, 1, 128, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 1, 1, 256, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 1, 1, 384, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 1, 2, 128, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 1, 2, 256, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 1, 2, 384, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 2, 1, 128, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 2, 1, 256, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 2, 1, 384, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 2, 2, 128, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 2, 2, 256, 240, 240, 208,,,, +5, 5, 4, 4, 2, 2, 2, 2, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 1, 128, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 1, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 1, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 2, 128, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 2, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 2, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 3, 128, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 3, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 1, 3, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 1, 128, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 1, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 1, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 2, 128, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 2, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 2, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 3, 128, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 3, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 2, 3, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 1, 128, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 1, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 1, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 2, 128, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 2, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 2, 384, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 3, 128, 240, 
240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 3, 256, 240, 240, 208,,,, +5, 5, 4, 4, 3, 3, 3, 3, 384, 240, 240, 208,,,, +5, 5, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 5, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 5, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 5, 4, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 
3, 3, 128, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 5, 4, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 5, 5, 1, 1, 1, 1, 1, 128, 80, 80, 55,,,, +5, 5, 5, 1, 1, 1, 1, 1, 256, 80, 80, 55,,,, +5, 5, 5, 1, 1, 1, 1, 1, 384, 80, 80, 55,,,, +5, 5, 5, 1, 2, 2, 1, 1, 128, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 1, 1, 256, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 1, 1, 384, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 1, 2, 128, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 1, 2, 256, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 1, 2, 384, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 2, 1, 128, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 2, 1, 256, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 2, 1, 384, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 2, 2, 128, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 2, 2, 256, 80, 78, 55,,,, +5, 5, 5, 1, 2, 2, 2, 2, 384, 80, 78, 55,,,, +5, 5, 5, 1, 3, 3, 1, 1, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 1, 1, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 1, 1, 384, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 1, 2, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 1, 2, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 1, 2, 384, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 1, 3, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 1, 3, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 1, 3, 384, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 1, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 1, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 1, 384, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 2, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 2, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 2, 384, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 3, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 3, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 2, 3, 384, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 1, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 1, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 1, 384, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 2, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 2, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 2, 384, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 3, 128, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 3, 256, 80, 75, 55,,,, +5, 5, 5, 1, 3, 3, 3, 3, 384, 
80, 75, 55,,,, +5, 5, 5, 2, 1, 1, 1, 1, 128, 156, 156, 106,,,, +5, 5, 5, 2, 1, 1, 1, 1, 256, 156, 156, 106,,,, +5, 5, 5, 2, 1, 1, 1, 1, 384, 156, 156, 106,,,, +5, 5, 5, 2, 2, 2, 1, 1, 128, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 1, 1, 256, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 1, 1, 384, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 1, 2, 128, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 1, 2, 256, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 1, 2, 384, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 2, 1, 128, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 2, 1, 256, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 2, 1, 384, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 2, 2, 128, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 2, 2, 256, 156, 152, 106,,,, +5, 5, 5, 2, 2, 2, 2, 2, 384, 156, 152, 106,,,, +5, 5, 5, 2, 3, 3, 1, 1, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 1, 1, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 1, 1, 384, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 1, 2, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 1, 2, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 1, 2, 384, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 1, 3, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 1, 3, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 1, 3, 384, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 1, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 1, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 1, 384, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 2, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 2, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 2, 384, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 3, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 3, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 2, 3, 384, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 3, 1, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 3, 1, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 3, 1, 384, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 3, 2, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 3, 2, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 3, 2, 384, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 3, 3, 128, 156, 146, 106,,,, +5, 5, 5, 2, 3, 3, 3, 3, 256, 156, 146, 106,,,, +5, 5, 5, 2, 3, 
3, 3, 3, 384, 156, 146, 106,,,, +5, 5, 5, 3, 1, 1, 1, 1, 128, 232, 232, 157,,,, +5, 5, 5, 3, 1, 1, 1, 1, 256, 232, 232, 157,,,, +5, 5, 5, 3, 1, 1, 1, 1, 384, 232, 232, 157,,,, +5, 5, 5, 3, 2, 2, 1, 1, 128, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 1, 1, 256, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 1, 1, 384, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 1, 2, 128, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 1, 2, 256, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 1, 2, 384, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 2, 1, 128, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 2, 1, 256, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 2, 1, 384, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 2, 2, 128, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 2, 2, 256, 232, 226, 157,,,, +5, 5, 5, 3, 2, 2, 2, 2, 384, 232, 226, 157,,,, +5, 5, 5, 3, 3, 3, 1, 1, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 1, 1, 256, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 1, 1, 384, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 1, 2, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 1, 2, 256, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 1, 2, 384, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 1, 3, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 1, 3, 256, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 1, 3, 384, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 1, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 1, 256, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 1, 384, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 2, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 2, 256, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 2, 384, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 3, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 3, 256, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 2, 3, 384, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 3, 1, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 3, 1, 256, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 3, 1, 384, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 3, 2, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 3, 2, 256, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 3, 2, 384, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 3, 3, 128, 232, 217, 157,,,, +5, 5, 5, 3, 3, 3, 3, 3, 256, 232, 217, 157,,,, 
+5, 5, 5, 3, 3, 3, 3, 3, 384, 232, 217, 157,,,, +5, 5, 5, 4, 1, 1, 1, 1, 128, 240, 240, 208,,,, +5, 5, 5, 4, 1, 1, 1, 1, 256, 240, 240, 208,,,, +5, 5, 5, 4, 1, 1, 1, 1, 384, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 1, 1, 128, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 1, 1, 256, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 1, 1, 384, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 1, 2, 128, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 1, 2, 256, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 1, 2, 384, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 2, 1, 128, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 2, 1, 256, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 2, 1, 384, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 2, 2, 128, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 2, 2, 256, 240, 240, 208,,,, +5, 5, 5, 4, 2, 2, 2, 2, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 1, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 1, 256, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 1, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 2, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 2, 256, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 2, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 3, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 3, 256, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 1, 3, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 1, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 1, 256, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 1, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 2, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 2, 256, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 2, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 3, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 3, 256, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 2, 3, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 1, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 1, 256, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 1, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 2, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 2, 256, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 2, 384, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 3, 128, 240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 3, 256, 
240, 240, 208,,,, +5, 5, 5, 4, 3, 3, 3, 3, 384, 240, 240, 208,,,, +5, 5, 5, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 5, 5, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 5, 5, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 5, 5, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 5, 5, 5, 
3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 5, 5, 5, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 6, 1, 1, 1, 1, 1, 1, 128, 81, 81, 56,,,, +5, 6, 1, 1, 1, 1, 1, 1, 256, 81, 81, 56,,,, +5, 6, 1, 1, 1, 1, 1, 1, 384, 81, 81, 56,,,, +5, 6, 1, 1, 2, 2, 1, 1, 128, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 1, 1, 256, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 1, 1, 384, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 1, 2, 128, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 1, 2, 256, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 1, 2, 384, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 2, 1, 128, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 2, 1, 256, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 2, 1, 384, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 2, 2, 128, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 2, 2, 256, 81, 75, 53,,,, +5, 6, 1, 1, 2, 2, 2, 2, 384, 81, 75, 53,,,, +5, 6, 1, 1, 3, 3, 1, 1, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 1, 1, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 1, 1, 384, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 1, 2, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 1, 2, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 1, 2, 384, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 1, 3, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 1, 3, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 1, 3, 384, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 1, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 1, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 1, 384, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 2, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 2, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 2, 384, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 3, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 3, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 2, 3, 384, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 1, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 1, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 1, 384, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 2, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 2, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 2, 384, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 3, 128, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 3, 256, 81, 72, 51,,,, +5, 6, 1, 1, 3, 3, 3, 3, 384, 81, 72, 51,,,, +5, 6, 1, 2, 1, 1, 1, 1, 
128, 157, 157, 107,,,, +5, 6, 1, 2, 1, 1, 1, 1, 256, 157, 157, 107,,,, +5, 6, 1, 2, 1, 1, 1, 1, 384, 157, 157, 107,,,, +5, 6, 1, 2, 2, 2, 1, 1, 128, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 1, 1, 256, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 1, 1, 384, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 1, 2, 128, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 1, 2, 256, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 1, 2, 384, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 2, 1, 128, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 2, 1, 256, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 2, 1, 384, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 2, 2, 128, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 2, 2, 256, 157, 145, 101,,,, +5, 6, 1, 2, 2, 2, 2, 2, 384, 157, 145, 101,,,, +5, 6, 1, 2, 3, 3, 1, 1, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 1, 1, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 1, 1, 384, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 1, 2, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 1, 2, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 1, 2, 384, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 1, 3, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 1, 3, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 1, 3, 384, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 1, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 1, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 1, 384, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 2, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 2, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 2, 384, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 3, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 3, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 2, 3, 384, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 1, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 1, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 1, 384, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 2, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 2, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 2, 384, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 3, 128, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 3, 256, 157, 139, 97,,,, +5, 6, 1, 2, 3, 3, 3, 3, 384, 157, 139, 97,,,, +5, 6, 1, 3, 1, 1, 1, 1, 128, 233, 
233, 158,,,, +5, 6, 1, 3, 1, 1, 1, 1, 256, 233, 233, 158,,,, +5, 6, 1, 3, 1, 1, 1, 1, 384, 233, 233, 158,,,, +5, 6, 1, 3, 2, 2, 1, 1, 128, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 1, 1, 256, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 1, 1, 384, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 1, 2, 128, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 1, 2, 256, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 1, 2, 384, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 2, 1, 128, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 2, 1, 256, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 2, 1, 384, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 2, 2, 128, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 2, 2, 256, 233, 215, 149,,,, +5, 6, 1, 3, 2, 2, 2, 2, 384, 233, 215, 149,,,, +5, 6, 1, 3, 3, 3, 1, 1, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 1, 1, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 1, 1, 384, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 1, 2, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 1, 2, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 1, 2, 384, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 1, 3, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 1, 3, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 1, 3, 384, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 1, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 1, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 1, 384, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 2, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 2, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 2, 384, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 3, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 3, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 2, 3, 384, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 1, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 1, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 1, 384, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 2, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 2, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 2, 384, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 3, 128, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 3, 256, 233, 206, 143,,,, +5, 6, 1, 3, 3, 3, 3, 3, 384, 233, 206, 143,,,, +5, 6, 1, 4, 1, 1, 
1, 1, 128, 240, 240, 209,,,, +5, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 209,,,, +5, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 209,,,, +5, 6, 1, 4, 2, 2, 1, 1, 128, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 1, 1, 256, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 1, 1, 384, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 1, 2, 128, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 1, 2, 256, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 1, 2, 384, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 2, 1, 128, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 2, 1, 256, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 2, 1, 384, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 2, 2, 128, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 2, 2, 256, 240, 240, 197,,,, +5, 6, 1, 4, 2, 2, 2, 2, 384, 240, 240, 197,,,, +5, 6, 1, 4, 3, 3, 1, 1, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 1, 1, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 1, 1, 384, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 1, 2, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 1, 2, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 1, 2, 384, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 1, 3, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 1, 3, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 1, 3, 384, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 1, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 1, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 1, 384, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 2, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 2, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 2, 384, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 3, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 3, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 2, 3, 384, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 1, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 1, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 1, 384, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 2, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 2, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 2, 384, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 3, 128, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 3, 256, 240, 240, 189,,,, +5, 6, 1, 4, 3, 3, 3, 3, 384, 240, 240, 189,,,, 
+5, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 1, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 1, 5, 3, 3, 1, 1, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 1, 1, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 1, 1, 384, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 1, 2, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 1, 2, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 1, 2, 384, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 1, 3, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 1, 3, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 1, 3, 384, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 1, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 1, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 1, 384, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 2, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 2, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 2, 384, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 3, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 3, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 2, 3, 384, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 1, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 1, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 1, 384, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 2, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 2, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 2, 384, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 3, 128, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 3, 256, 240, 240, 235,,,, +5, 6, 1, 5, 3, 3, 3, 3, 384, 
240, 240, 235,,,, +5, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 1, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 6, 1, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 6, 1, 6, 
3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 6, 2, 1, 1, 1, 1, 1, 128, 81, 81, 56,,,, +5, 6, 2, 1, 1, 1, 1, 1, 256, 81, 81, 56,,,, +5, 6, 2, 1, 1, 1, 1, 1, 384, 81, 81, 56,,,, +5, 6, 2, 1, 2, 2, 1, 1, 128, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 1, 1, 256, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 1, 1, 384, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 1, 2, 128, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 1, 2, 256, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 1, 2, 384, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 2, 1, 128, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 2, 1, 256, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 2, 1, 384, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 2, 2, 128, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 2, 2, 256, 81, 75, 53,,,, +5, 6, 2, 1, 2, 2, 2, 2, 384, 81, 75, 53,,,, +5, 6, 2, 1, 3, 3, 1, 1, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 1, 1, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 1, 1, 384, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 1, 2, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 1, 2, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 1, 2, 384, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 1, 3, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 1, 3, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 1, 3, 384, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 1, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 1, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 1, 384, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 2, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 2, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 2, 384, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 3, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 3, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 2, 3, 384, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 1, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 1, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 1, 384, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 2, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 2, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 2, 384, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 3, 128, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 3, 256, 81, 72, 51,,,, +5, 6, 2, 1, 3, 3, 3, 3, 384, 81, 72, 51,,,, +5, 6, 2, 2, 1, 1, 1, 1, 128, 157, 157, 107,,,, +5, 6, 2, 2, 1, 1, 1, 1, 
256, 157, 157, 107,,,, +5, 6, 2, 2, 1, 1, 1, 1, 384, 157, 157, 107,,,, +5, 6, 2, 2, 2, 2, 1, 1, 128, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 1, 1, 256, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 1, 1, 384, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 1, 2, 128, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 1, 2, 256, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 1, 2, 384, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 2, 1, 128, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 2, 1, 256, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 2, 1, 384, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 2, 2, 128, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 2, 2, 256, 157, 145, 101,,,, +5, 6, 2, 2, 2, 2, 2, 2, 384, 157, 145, 101,,,, +5, 6, 2, 2, 3, 3, 1, 1, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 1, 1, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 1, 1, 384, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 1, 2, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 1, 2, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 1, 2, 384, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 1, 3, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 1, 3, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 1, 3, 384, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 1, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 1, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 1, 384, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 2, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 2, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 2, 384, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 3, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 3, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 2, 3, 384, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 1, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 1, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 1, 384, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 2, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 2, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 2, 384, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 3, 128, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 3, 256, 157, 139, 97,,,, +5, 6, 2, 2, 3, 3, 3, 3, 384, 157, 139, 97,,,, +5, 6, 2, 3, 1, 1, 1, 1, 128, 233, 233, 158,,,, +5, 6, 2, 3, 1, 1, 1, 1, 256, 233, 
233, 158,,,, +5, 6, 2, 3, 1, 1, 1, 1, 384, 233, 233, 158,,,, +5, 6, 2, 3, 2, 2, 1, 1, 128, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 1, 1, 256, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 1, 1, 384, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 1, 2, 128, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 1, 2, 256, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 1, 2, 384, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 2, 1, 128, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 2, 1, 256, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 2, 1, 384, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 2, 2, 128, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 2, 2, 256, 233, 215, 149,,,, +5, 6, 2, 3, 2, 2, 2, 2, 384, 233, 215, 149,,,, +5, 6, 2, 3, 3, 3, 1, 1, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 1, 1, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 1, 1, 384, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 1, 2, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 1, 2, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 1, 2, 384, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 1, 3, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 1, 3, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 1, 3, 384, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 1, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 1, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 1, 384, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 2, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 2, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 2, 384, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 3, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 3, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 2, 3, 384, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 1, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 1, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 1, 384, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 2, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 2, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 2, 384, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 3, 128, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 3, 256, 233, 206, 143,,,, +5, 6, 2, 3, 3, 3, 3, 3, 384, 233, 206, 143,,,, +5, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 209,,,, +5, 6, 2, 4, 1, 1, 
1, 1, 256, 240, 240, 209,,,, +5, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 209,,,, +5, 6, 2, 4, 2, 2, 1, 1, 128, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 1, 1, 256, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 1, 1, 384, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 1, 2, 128, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 1, 2, 256, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 1, 2, 384, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 2, 1, 128, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 2, 1, 256, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 2, 1, 384, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 2, 2, 128, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 2, 2, 256, 240, 240, 197,,,, +5, 6, 2, 4, 2, 2, 2, 2, 384, 240, 240, 197,,,, +5, 6, 2, 4, 3, 3, 1, 1, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 1, 1, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 1, 1, 384, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 1, 2, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 1, 2, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 1, 2, 384, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 1, 3, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 1, 3, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 1, 3, 384, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 1, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 1, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 1, 384, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 2, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 2, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 2, 384, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 3, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 3, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 2, 3, 384, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 1, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 1, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 1, 384, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 2, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 2, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 2, 384, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 3, 128, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 3, 256, 240, 240, 189,,,, +5, 6, 2, 4, 3, 3, 3, 3, 384, 240, 240, 189,,,, +5, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, 
+5, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 2, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 2, 5, 3, 3, 1, 1, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 1, 1, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 1, 1, 384, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 1, 2, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 1, 2, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 1, 2, 384, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 1, 3, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 1, 3, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 1, 3, 384, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 1, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 1, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 1, 384, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 2, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 2, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 2, 384, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 3, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 3, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 2, 3, 384, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 1, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 1, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 1, 384, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 2, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 2, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 2, 384, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 3, 128, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 3, 256, 240, 240, 235,,,, +5, 6, 2, 5, 3, 3, 3, 3, 384, 240, 240, 235,,,, +5, 6, 2, 6, 1, 1, 1, 1, 128, 
240, 240, 240,,,, +5, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 2, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 6, 2, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 6, 3, 1, 
1, 1, 1, 1, 128, 81, 81, 56,,,, +5, 6, 3, 1, 1, 1, 1, 1, 256, 81, 81, 56,,,, +5, 6, 3, 1, 1, 1, 1, 1, 384, 81, 81, 56,,,, +5, 6, 3, 1, 2, 2, 1, 1, 128, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 1, 1, 256, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 1, 1, 384, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 1, 2, 128, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 1, 2, 256, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 1, 2, 384, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 2, 1, 128, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 2, 1, 256, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 2, 1, 384, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 2, 2, 128, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 2, 2, 256, 81, 75, 53,,,, +5, 6, 3, 1, 2, 2, 2, 2, 384, 81, 75, 53,,,, +5, 6, 3, 1, 3, 3, 1, 1, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 1, 1, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 1, 1, 384, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 1, 2, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 1, 2, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 1, 2, 384, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 1, 3, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 1, 3, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 1, 3, 384, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 1, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 1, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 1, 384, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 2, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 2, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 2, 384, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 3, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 3, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 2, 3, 384, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 1, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 1, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 1, 384, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 2, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 2, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 2, 384, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 3, 128, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 3, 256, 81, 72, 51,,,, +5, 6, 3, 1, 3, 3, 3, 3, 384, 81, 72, 51,,,, +5, 6, 3, 2, 1, 1, 1, 1, 128, 157, 157, 107,,,, +5, 6, 3, 2, 1, 1, 1, 1, 256, 157, 157, 107,,,, +5, 6, 3, 2, 1, 1, 1, 1, 
384, 157, 157, 107,,,, +5, 6, 3, 2, 2, 2, 1, 1, 128, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 1, 1, 256, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 1, 1, 384, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 1, 2, 128, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 1, 2, 256, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 1, 2, 384, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 2, 1, 128, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 2, 1, 256, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 2, 1, 384, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 2, 2, 128, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 2, 2, 256, 157, 145, 101,,,, +5, 6, 3, 2, 2, 2, 2, 2, 384, 157, 145, 101,,,, +5, 6, 3, 2, 3, 3, 1, 1, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 1, 1, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 1, 1, 384, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 1, 2, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 1, 2, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 1, 2, 384, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 1, 3, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 1, 3, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 1, 3, 384, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 1, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 1, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 1, 384, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 2, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 2, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 2, 384, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 3, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 3, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 2, 3, 384, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 1, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 1, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 1, 384, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 2, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 2, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 2, 384, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 3, 128, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 3, 256, 157, 139, 97,,,, +5, 6, 3, 2, 3, 3, 3, 3, 384, 157, 139, 97,,,, +5, 6, 3, 3, 1, 1, 1, 1, 128, 233, 233, 158,,,, +5, 6, 3, 3, 1, 1, 1, 1, 256, 233, 233, 158,,,, +5, 6, 3, 3, 1, 1, 1, 1, 384, 233, 
233, 158,,,, +5, 6, 3, 3, 2, 2, 1, 1, 128, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 1, 1, 256, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 1, 1, 384, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 1, 2, 128, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 1, 2, 256, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 1, 2, 384, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 2, 1, 128, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 2, 1, 256, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 2, 1, 384, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 2, 2, 128, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 2, 2, 256, 233, 215, 149,,,, +5, 6, 3, 3, 2, 2, 2, 2, 384, 233, 215, 149,,,, +5, 6, 3, 3, 3, 3, 1, 1, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 1, 1, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 1, 1, 384, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 1, 2, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 1, 2, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 1, 2, 384, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 1, 3, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 1, 3, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 1, 3, 384, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 1, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 1, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 1, 384, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 2, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 2, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 2, 384, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 3, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 3, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 2, 3, 384, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 1, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 1, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 1, 384, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 2, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 2, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 2, 384, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 3, 128, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 3, 256, 233, 206, 143,,,, +5, 6, 3, 3, 3, 3, 3, 3, 384, 233, 206, 143,,,, +5, 6, 3, 4, 1, 1, 1, 1, 128, 240, 240, 209,,,, +5, 6, 3, 4, 1, 1, 1, 1, 256, 240, 240, 209,,,, +5, 6, 3, 4, 1, 1, 
1, 1, 384, 240, 240, 209,,,, +5, 6, 3, 4, 2, 2, 1, 1, 128, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 1, 1, 256, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 1, 1, 384, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 1, 2, 128, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 1, 2, 256, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 1, 2, 384, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 2, 1, 128, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 2, 1, 256, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 2, 1, 384, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 2, 2, 128, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 2, 2, 256, 240, 240, 197,,,, +5, 6, 3, 4, 2, 2, 2, 2, 384, 240, 240, 197,,,, +5, 6, 3, 4, 3, 3, 1, 1, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 1, 1, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 1, 1, 384, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 1, 2, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 1, 2, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 1, 2, 384, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 1, 3, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 1, 3, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 1, 3, 384, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 1, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 1, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 1, 384, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 2, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 2, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 2, 384, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 3, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 3, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 2, 3, 384, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 1, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 1, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 1, 384, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 2, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 2, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 2, 384, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 3, 128, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 3, 256, 240, 240, 189,,,, +5, 6, 3, 4, 3, 3, 3, 3, 384, 240, 240, 189,,,, +5, 6, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 6, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, 
+5, 6, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 3, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 3, 5, 3, 3, 1, 1, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 1, 1, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 1, 1, 384, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 1, 2, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 1, 2, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 1, 2, 384, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 1, 3, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 1, 3, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 1, 3, 384, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 1, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 1, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 1, 384, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 2, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 2, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 2, 384, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 3, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 3, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 2, 3, 384, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 1, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 1, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 1, 384, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 2, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 2, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 2, 384, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 3, 128, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 3, 256, 240, 240, 235,,,, +5, 6, 3, 5, 3, 3, 3, 3, 384, 240, 240, 235,,,, +5, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 6, 3, 6, 1, 1, 1, 1, 256, 
240, 240, 240,,,, +5, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 3, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 6, 3, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 6, 4, 1, 1, 1, 1, 1, 128, 81, 81, 56,,,, +5, 6, 4, 1, 1, 
1, 1, 1, 256, 81, 81, 56,,,, +5, 6, 4, 1, 1, 1, 1, 1, 384, 81, 81, 56,,,, +5, 6, 4, 1, 2, 2, 1, 1, 128, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 1, 1, 256, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 1, 1, 384, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 1, 2, 128, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 1, 2, 256, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 1, 2, 384, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 2, 1, 128, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 2, 1, 256, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 2, 1, 384, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 2, 2, 128, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 2, 2, 256, 81, 75, 53,,,, +5, 6, 4, 1, 2, 2, 2, 2, 384, 81, 75, 53,,,, +5, 6, 4, 1, 3, 3, 1, 1, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 1, 1, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 1, 1, 384, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 1, 2, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 1, 2, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 1, 2, 384, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 1, 3, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 1, 3, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 1, 3, 384, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 1, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 1, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 1, 384, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 2, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 2, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 2, 384, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 3, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 3, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 2, 3, 384, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 1, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 1, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 1, 384, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 2, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 2, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 2, 384, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 3, 128, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 3, 256, 81, 72, 51,,,, +5, 6, 4, 1, 3, 3, 3, 3, 384, 81, 72, 51,,,, +5, 6, 4, 2, 1, 1, 1, 1, 128, 157, 157, 107,,,, +5, 6, 4, 2, 1, 1, 1, 1, 256, 157, 157, 107,,,, +5, 6, 4, 2, 1, 1, 1, 1, 384, 157, 157, 107,,,, +5, 6, 4, 2, 2, 2, 1, 1, 
128, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 1, 1, 256, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 1, 1, 384, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 1, 2, 128, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 1, 2, 256, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 1, 2, 384, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 2, 1, 128, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 2, 1, 256, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 2, 1, 384, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 2, 2, 128, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 2, 2, 256, 157, 145, 101,,,, +5, 6, 4, 2, 2, 2, 2, 2, 384, 157, 145, 101,,,, +5, 6, 4, 2, 3, 3, 1, 1, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 1, 1, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 1, 1, 384, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 1, 2, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 1, 2, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 1, 2, 384, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 1, 3, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 1, 3, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 1, 3, 384, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 1, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 1, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 1, 384, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 2, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 2, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 2, 384, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 3, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 3, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 2, 3, 384, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 1, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 1, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 1, 384, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 2, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 2, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 2, 384, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 3, 128, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 3, 256, 157, 139, 97,,,, +5, 6, 4, 2, 3, 3, 3, 3, 384, 157, 139, 97,,,, +5, 6, 4, 3, 1, 1, 1, 1, 128, 233, 233, 158,,,, +5, 6, 4, 3, 1, 1, 1, 1, 256, 233, 233, 158,,,, +5, 6, 4, 3, 1, 1, 1, 1, 384, 233, 233, 158,,,, +5, 6, 4, 3, 2, 2, 1, 1, 128, 233, 
215, 149,,,, +5, 6, 4, 3, 2, 2, 1, 1, 256, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 1, 1, 384, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 1, 2, 128, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 1, 2, 256, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 1, 2, 384, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 2, 1, 128, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 2, 1, 256, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 2, 1, 384, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 2, 2, 128, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 2, 2, 256, 233, 215, 149,,,, +5, 6, 4, 3, 2, 2, 2, 2, 384, 233, 215, 149,,,, +5, 6, 4, 3, 3, 3, 1, 1, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 1, 1, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 1, 1, 384, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 1, 2, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 1, 2, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 1, 2, 384, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 1, 3, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 1, 3, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 1, 3, 384, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 1, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 1, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 1, 384, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 2, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 2, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 2, 384, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 3, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 3, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 2, 3, 384, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 1, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 1, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 1, 384, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 2, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 2, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 2, 384, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 3, 128, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 3, 256, 233, 206, 143,,,, +5, 6, 4, 3, 3, 3, 3, 3, 384, 233, 206, 143,,,, +5, 6, 4, 4, 1, 1, 1, 1, 128, 240, 240, 209,,,, +5, 6, 4, 4, 1, 1, 1, 1, 256, 240, 240, 209,,,, +5, 6, 4, 4, 1, 1, 1, 1, 384, 240, 240, 209,,,, +5, 6, 4, 4, 2, 2, 
1, 1, 128, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 1, 1, 256, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 1, 1, 384, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 1, 2, 128, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 1, 2, 256, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 1, 2, 384, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 2, 1, 128, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 2, 1, 256, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 2, 1, 384, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 2, 2, 128, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 2, 2, 256, 240, 240, 197,,,, +5, 6, 4, 4, 2, 2, 2, 2, 384, 240, 240, 197,,,, +5, 6, 4, 4, 3, 3, 1, 1, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 1, 1, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 1, 1, 384, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 1, 2, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 1, 2, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 1, 2, 384, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 1, 3, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 1, 3, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 1, 3, 384, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 1, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 1, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 1, 384, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 2, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 2, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 2, 384, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 3, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 3, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 2, 3, 384, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 1, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 1, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 1, 384, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 2, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 2, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 2, 384, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 3, 128, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 3, 256, 240, 240, 189,,,, +5, 6, 4, 4, 3, 3, 3, 3, 384, 240, 240, 189,,,, +5, 6, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 6, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 6, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, 
+5, 6, 4, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 4, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 4, 5, 3, 3, 1, 1, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 1, 1, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 1, 1, 384, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 1, 2, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 1, 2, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 1, 2, 384, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 1, 3, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 1, 3, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 1, 3, 384, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 1, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 1, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 1, 384, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 2, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 2, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 2, 384, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 3, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 3, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 2, 3, 384, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 1, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 1, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 1, 384, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 2, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 2, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 2, 384, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 3, 128, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 3, 256, 240, 240, 235,,,, +5, 6, 4, 5, 3, 3, 3, 3, 384, 240, 240, 235,,,, +5, 6, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 6, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 6, 4, 6, 1, 1, 1, 1, 384, 
240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 1, 1, 128, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 4, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 6, 4, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 6, 5, 1, 1, 1, 1, 1, 128, 81, 81, 56,,,, +5, 6, 5, 1, 1, 1, 1, 1, 256, 81, 81, 56,,,, +5, 6, 5, 1, 1, 1, 
1, 1, 384, 81, 81, 56,,,, +5, 6, 5, 1, 2, 2, 1, 1, 128, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 1, 1, 256, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 1, 1, 384, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 1, 2, 128, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 1, 2, 256, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 1, 2, 384, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 2, 1, 128, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 2, 1, 256, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 2, 1, 384, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 2, 2, 128, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 2, 2, 256, 81, 75, 53,,,, +5, 6, 5, 1, 2, 2, 2, 2, 384, 81, 75, 53,,,, +5, 6, 5, 1, 3, 3, 1, 1, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 1, 1, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 1, 1, 384, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 1, 2, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 1, 2, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 1, 2, 384, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 1, 3, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 1, 3, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 1, 3, 384, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 1, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 1, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 1, 384, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 2, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 2, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 2, 384, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 3, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 3, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 2, 3, 384, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 1, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 1, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 1, 384, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 2, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 2, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 2, 384, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 3, 128, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 3, 256, 81, 72, 51,,,, +5, 6, 5, 1, 3, 3, 3, 3, 384, 81, 72, 51,,,, +5, 6, 5, 2, 1, 1, 1, 1, 128, 157, 157, 107,,,, +5, 6, 5, 2, 1, 1, 1, 1, 256, 157, 157, 107,,,, +5, 6, 5, 2, 1, 1, 1, 1, 384, 157, 157, 107,,,, +5, 6, 5, 2, 2, 2, 1, 1, 128, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 1, 1, 
256, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 1, 1, 384, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 1, 2, 128, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 1, 2, 256, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 1, 2, 384, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 2, 1, 128, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 2, 1, 256, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 2, 1, 384, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 2, 2, 128, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 2, 2, 256, 157, 145, 101,,,, +5, 6, 5, 2, 2, 2, 2, 2, 384, 157, 145, 101,,,, +5, 6, 5, 2, 3, 3, 1, 1, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 1, 1, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 1, 1, 384, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 1, 2, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 1, 2, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 1, 2, 384, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 1, 3, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 1, 3, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 1, 3, 384, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 1, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 1, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 1, 384, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 2, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 2, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 2, 384, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 3, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 3, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 2, 3, 384, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 1, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 1, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 1, 384, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 2, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 2, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 2, 384, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 3, 128, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 3, 256, 157, 139, 97,,,, +5, 6, 5, 2, 3, 3, 3, 3, 384, 157, 139, 97,,,, +5, 6, 5, 3, 1, 1, 1, 1, 128, 233, 233, 158,,,, +5, 6, 5, 3, 1, 1, 1, 1, 256, 233, 233, 158,,,, +5, 6, 5, 3, 1, 1, 1, 1, 384, 233, 233, 158,,,, +5, 6, 5, 3, 2, 2, 1, 1, 128, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 1, 1, 256, 233, 
215, 149,,,, +5, 6, 5, 3, 2, 2, 1, 1, 384, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 1, 2, 128, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 1, 2, 256, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 1, 2, 384, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 2, 1, 128, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 2, 1, 256, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 2, 1, 384, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 2, 2, 128, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 2, 2, 256, 233, 215, 149,,,, +5, 6, 5, 3, 2, 2, 2, 2, 384, 233, 215, 149,,,, +5, 6, 5, 3, 3, 3, 1, 1, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 1, 1, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 1, 1, 384, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 1, 2, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 1, 2, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 1, 2, 384, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 1, 3, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 1, 3, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 1, 3, 384, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 1, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 1, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 1, 384, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 2, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 2, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 2, 384, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 3, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 3, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 2, 3, 384, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 1, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 1, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 1, 384, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 2, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 2, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 2, 384, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 3, 128, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 3, 256, 233, 206, 143,,,, +5, 6, 5, 3, 3, 3, 3, 3, 384, 233, 206, 143,,,, +5, 6, 5, 4, 1, 1, 1, 1, 128, 240, 240, 209,,,, +5, 6, 5, 4, 1, 1, 1, 1, 256, 240, 240, 209,,,, +5, 6, 5, 4, 1, 1, 1, 1, 384, 240, 240, 209,,,, +5, 6, 5, 4, 2, 2, 1, 1, 128, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 
1, 1, 256, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 1, 1, 384, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 1, 2, 128, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 1, 2, 256, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 1, 2, 384, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 2, 1, 128, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 2, 1, 256, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 2, 1, 384, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 2, 2, 128, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 2, 2, 256, 240, 240, 197,,,, +5, 6, 5, 4, 2, 2, 2, 2, 384, 240, 240, 197,,,, +5, 6, 5, 4, 3, 3, 1, 1, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 1, 1, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 1, 1, 384, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 1, 2, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 1, 2, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 1, 2, 384, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 1, 3, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 1, 3, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 1, 3, 384, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 1, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 1, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 1, 384, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 2, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 2, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 2, 384, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 3, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 3, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 2, 3, 384, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 1, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 1, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 1, 384, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 2, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 2, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 2, 384, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 3, 128, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 3, 256, 240, 240, 189,,,, +5, 6, 5, 4, 3, 3, 3, 3, 384, 240, 240, 189,,,, +5, 6, 5, 5, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 6, 5, 5, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 6, 5, 5, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 1, 1, 128, 240, 240, 240,,,, 
+5, 6, 5, 5, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 5, 5, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 5, 5, 3, 3, 1, 1, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 1, 1, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 1, 1, 384, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 1, 2, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 1, 2, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 1, 2, 384, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 1, 3, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 1, 3, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 1, 3, 384, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 1, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 1, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 1, 384, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 2, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 2, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 2, 384, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 3, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 3, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 2, 3, 384, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 1, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 1, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 1, 384, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 2, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 2, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 2, 384, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 3, 128, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 3, 256, 240, 240, 235,,,, +5, 6, 5, 5, 3, 3, 3, 3, 384, 240, 240, 235,,,, +5, 6, 5, 6, 1, 1, 1, 1, 128, 240, 240, 240,,,, +5, 6, 5, 6, 1, 1, 1, 1, 256, 240, 240, 240,,,, +5, 6, 5, 6, 1, 1, 1, 1, 384, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 1, 1, 128, 
240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 1, 1, 256, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 1, 1, 384, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 1, 2, 128, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 1, 2, 256, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 1, 2, 384, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 2, 1, 128, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 2, 1, 256, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 2, 1, 384, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 2, 2, 128, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 2, 2, 256, 240, 240, 240,,,, +5, 6, 5, 6, 2, 2, 2, 2, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 1, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 1, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 1, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 2, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 2, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 2, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 3, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 3, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 1, 3, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 1, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 1, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 1, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 2, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 2, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 2, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 3, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 3, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 2, 3, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 1, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 1, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 1, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 2, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 2, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 2, 384, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 3, 128, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 3, 256, 240, 240, 240,,,, +5, 6, 5, 6, 3, 3, 3, 3, 384, 240, 240, 240,,,, +5, 7, 1, 1, 1, 1, 1, 1, 128, 82, 57, 31,,,, +5, 7, 1, 1, 1, 1, 1, 1, 256, 82, 57, 31,,,, +5, 7, 1, 1, 1, 1, 1, 1, 384, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 1, 
1, 128, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 1, 1, 256, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 1, 1, 384, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 1, 2, 128, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 1, 2, 256, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 1, 2, 384, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 2, 1, 128, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 2, 1, 256, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 2, 1, 384, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 2, 2, 128, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 2, 2, 256, 82, 57, 31,,,, +5, 7, 1, 1, 2, 2, 2, 2, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 1, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 1, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 1, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 2, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 2, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 2, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 3, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 3, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 1, 3, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 1, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 1, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 1, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 2, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 2, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 2, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 3, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 3, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 2, 3, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 1, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 1, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 1, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 2, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 2, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 2, 384, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 3, 128, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 3, 256, 82, 57, 31,,,, +5, 7, 1, 1, 3, 3, 3, 3, 384, 82, 57, 31,,,, +5, 7, 1, 2, 1, 1, 1, 1, 128, 158, 108, 56,,,, +5, 7, 1, 2, 1, 1, 1, 1, 256, 158, 108, 56,,,, +5, 7, 1, 2, 1, 1, 1, 1, 384, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 1, 1, 128, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 1, 1, 256, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 1, 1, 384, 
158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 1, 2, 128, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 1, 2, 256, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 1, 2, 384, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 2, 1, 128, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 2, 1, 256, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 2, 1, 384, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 2, 2, 128, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 2, 2, 256, 158, 108, 56,,,, +5, 7, 1, 2, 2, 2, 2, 2, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 1, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 1, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 1, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 2, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 2, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 2, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 3, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 3, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 1, 3, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 1, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 1, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 1, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 2, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 2, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 2, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 3, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 3, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 2, 3, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 1, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 1, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 1, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 2, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 2, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 2, 384, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 3, 128, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 3, 256, 158, 108, 56,,,, +5, 7, 1, 2, 3, 3, 3, 3, 384, 158, 108, 56,,,, +5, 7, 1, 3, 1, 1, 1, 1, 128, 234, 159, 81,,,, +5, 7, 1, 3, 1, 1, 1, 1, 256, 234, 159, 81,,,, +5, 7, 1, 3, 1, 1, 1, 1, 384, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 1, 1, 128, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 1, 1, 256, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 1, 1, 384, 234, 159, 81,,,, +5, 7, 
1, 3, 2, 2, 1, 2, 128, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 1, 2, 256, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 1, 2, 384, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 2, 1, 128, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 2, 1, 256, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 2, 1, 384, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 2, 2, 128, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 2, 2, 256, 234, 159, 81,,,, +5, 7, 1, 3, 2, 2, 2, 2, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 1, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 1, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 1, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 2, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 2, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 2, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 3, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 3, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 1, 3, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 1, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 1, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 1, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 2, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 2, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 2, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 3, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 3, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 2, 3, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 1, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 1, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 1, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 2, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 2, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 2, 384, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 3, 128, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 3, 256, 234, 159, 81,,,, +5, 7, 1, 3, 3, 3, 3, 3, 384, 234, 159, 81,,,, +5, 7, 1, 4, 1, 1, 1, 1, 128, 240, 210, 106,,,, +5, 7, 1, 4, 1, 1, 1, 1, 256, 240, 210, 106,,,, +5, 7, 1, 4, 1, 1, 1, 1, 384, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 1, 1, 128, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 1, 1, 256, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 1, 1, 384, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 1, 2, 
128, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 1, 2, 256, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 1, 2, 384, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 2, 1, 128, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 2, 1, 256, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 2, 1, 384, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 2, 2, 128, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 2, 2, 256, 240, 210, 106,,,, +5, 7, 1, 4, 2, 2, 2, 2, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 1, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 1, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 1, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 2, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 2, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 2, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 3, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 3, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 1, 3, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 1, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 1, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 1, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 2, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 2, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 2, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 3, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 3, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 2, 3, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 1, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 1, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 1, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 2, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 2, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 2, 384, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 3, 128, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 3, 256, 240, 210, 106,,,, +5, 7, 1, 4, 3, 3, 3, 3, 384, 240, 210, 106,,,, +5, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 131,,,, +5, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 131,,,, +5, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 1, 1, 128, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 1, 1, 256, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 1, 1, 384, 240, 240, 131,,,, +5, 7, 
1, 5, 2, 2, 1, 2, 128, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 1, 2, 256, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 1, 2, 384, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 2, 1, 128, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 2, 1, 256, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 2, 1, 384, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 2, 2, 128, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 2, 2, 256, 240, 240, 131,,,, +5, 7, 1, 5, 2, 2, 2, 2, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 1, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 1, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 1, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 2, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 2, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 2, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 3, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 3, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 1, 3, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 1, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 1, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 1, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 2, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 2, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 2, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 3, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 3, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 2, 3, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 1, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 1, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 1, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 2, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 2, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 2, 384, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 3, 128, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 3, 256, 240, 240, 131,,,, +5, 7, 1, 5, 3, 3, 3, 3, 384, 240, 240, 131,,,, +5, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 156,,,, +5, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 156,,,, +5, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 1, 1, 128, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 1, 1, 256, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 1, 1, 384, 240, 
240, 156,,,, +5, 7, 1, 6, 2, 2, 1, 2, 128, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 1, 2, 256, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 1, 2, 384, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 2, 1, 128, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 2, 1, 256, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 2, 1, 384, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 2, 2, 128, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 2, 2, 256, 240, 240, 156,,,, +5, 7, 1, 6, 2, 2, 2, 2, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 1, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 1, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 1, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 2, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 2, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 2, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 3, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 3, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 1, 3, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 1, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 1, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 1, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 2, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 2, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 2, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 3, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 3, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 2, 3, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 1, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 1, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 1, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 2, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 2, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 2, 384, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 3, 128, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 3, 256, 240, 240, 156,,,, +5, 7, 1, 6, 3, 3, 3, 3, 384, 240, 240, 156,,,, +5, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 181,,,, +5, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 181,,,, +5, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 1, 1, 128, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 1, 1, 256, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 
1, 1, 384, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 1, 2, 128, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 1, 2, 256, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 1, 2, 384, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 2, 1, 128, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 2, 1, 256, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 2, 1, 384, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 2, 2, 128, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 2, 2, 256, 240, 240, 181,,,, +5, 7, 1, 7, 2, 2, 2, 2, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 1, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 1, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 1, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 2, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 2, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 2, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 3, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 3, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 1, 3, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 1, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 1, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 1, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 2, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 2, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 2, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 3, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 3, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 2, 3, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 1, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 1, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 1, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 2, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 2, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 2, 384, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 3, 128, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 3, 256, 240, 240, 181,,,, +5, 7, 1, 7, 3, 3, 3, 3, 384, 240, 240, 181,,,, +5, 7, 2, 1, 1, 1, 1, 1, 128, 82, 57, 31,,,, +5, 7, 2, 1, 1, 1, 1, 1, 256, 82, 57, 31,,,, +5, 7, 2, 1, 1, 1, 1, 1, 384, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 1, 1, 128, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 1, 1, 256, 82, 57, 31,,,, +5, 7, 2, 1, 2, 
2, 1, 1, 384, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 1, 2, 128, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 1, 2, 256, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 1, 2, 384, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 2, 1, 128, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 2, 1, 256, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 2, 1, 384, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 2, 2, 128, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 2, 2, 256, 82, 57, 31,,,, +5, 7, 2, 1, 2, 2, 2, 2, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 1, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 1, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 1, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 2, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 2, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 2, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 3, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 3, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 1, 3, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 1, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 1, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 1, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 2, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 2, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 2, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 3, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 3, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 2, 3, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 1, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 1, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 1, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 2, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 2, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 2, 384, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 3, 128, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 3, 256, 82, 57, 31,,,, +5, 7, 2, 1, 3, 3, 3, 3, 384, 82, 57, 31,,,, +5, 7, 2, 2, 1, 1, 1, 1, 128, 158, 108, 56,,,, +5, 7, 2, 2, 1, 1, 1, 1, 256, 158, 108, 56,,,, +5, 7, 2, 2, 1, 1, 1, 1, 384, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 1, 1, 128, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 1, 1, 256, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 1, 1, 384, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 1, 2, 128, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 1, 
2, 256, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 1, 2, 384, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 2, 1, 128, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 2, 1, 256, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 2, 1, 384, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 2, 2, 128, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 2, 2, 256, 158, 108, 56,,,, +5, 7, 2, 2, 2, 2, 2, 2, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 1, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 1, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 1, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 2, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 2, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 2, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 3, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 3, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 1, 3, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 1, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 1, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 1, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 2, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 2, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 2, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 3, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 3, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 2, 3, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 1, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 1, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 1, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 2, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 2, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 2, 384, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 3, 128, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 3, 256, 158, 108, 56,,,, +5, 7, 2, 2, 3, 3, 3, 3, 384, 158, 108, 56,,,, +5, 7, 2, 3, 1, 1, 1, 1, 128, 234, 159, 81,,,, +5, 7, 2, 3, 1, 1, 1, 1, 256, 234, 159, 81,,,, +5, 7, 2, 3, 1, 1, 1, 1, 384, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 1, 1, 128, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 1, 1, 256, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 1, 1, 384, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 1, 2, 128, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 1, 2, 256, 234, 159, 81,,,, 
+5, 7, 2, 3, 2, 2, 1, 2, 384, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 2, 1, 128, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 2, 1, 256, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 2, 1, 384, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 2, 2, 128, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 2, 2, 256, 234, 159, 81,,,, +5, 7, 2, 3, 2, 2, 2, 2, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 1, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 1, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 1, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 2, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 2, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 2, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 3, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 3, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 1, 3, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 1, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 1, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 1, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 2, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 2, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 2, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 3, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 3, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 2, 3, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 1, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 1, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 1, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 2, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 2, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 2, 384, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 3, 128, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 3, 256, 234, 159, 81,,,, +5, 7, 2, 3, 3, 3, 3, 3, 384, 234, 159, 81,,,, +5, 7, 2, 4, 1, 1, 1, 1, 128, 240, 210, 106,,,, +5, 7, 2, 4, 1, 1, 1, 1, 256, 240, 210, 106,,,, +5, 7, 2, 4, 1, 1, 1, 1, 384, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 1, 1, 128, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 1, 1, 256, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 1, 1, 384, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 1, 2, 128, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 1, 2, 256, 240, 210, 106,,,, +5, 7, 2, 4, 2, 
2, 1, 2, 384, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 2, 1, 128, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 2, 1, 256, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 2, 1, 384, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 2, 2, 128, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 2, 2, 256, 240, 210, 106,,,, +5, 7, 2, 4, 2, 2, 2, 2, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 1, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 1, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 1, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 2, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 2, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 2, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 3, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 3, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 1, 3, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 1, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 1, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 1, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 2, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 2, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 2, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 3, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 3, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 2, 3, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 1, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 1, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 1, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 2, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 2, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 2, 384, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 3, 128, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 3, 256, 240, 210, 106,,,, +5, 7, 2, 4, 3, 3, 3, 3, 384, 240, 210, 106,,,, +5, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 131,,,, +5, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 131,,,, +5, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 1, 1, 128, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 1, 1, 256, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 1, 1, 384, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 1, 2, 128, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 1, 2, 256, 240, 240, 131,,,, 
+5, 7, 2, 5, 2, 2, 1, 2, 384, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 2, 1, 128, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 2, 1, 256, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 2, 1, 384, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 2, 2, 128, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 2, 2, 256, 240, 240, 131,,,, +5, 7, 2, 5, 2, 2, 2, 2, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 1, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 1, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 1, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 2, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 2, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 2, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 3, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 3, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 1, 3, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 1, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 1, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 1, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 2, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 2, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 2, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 3, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 3, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 2, 3, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 1, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 1, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 1, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 2, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 2, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 2, 384, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 3, 128, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 3, 256, 240, 240, 131,,,, +5, 7, 2, 5, 3, 3, 3, 3, 384, 240, 240, 131,,,, +5, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 156,,,, +5, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 156,,,, +5, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 1, 1, 128, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 1, 1, 256, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 1, 1, 384, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 1, 2, 128, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 1, 2, 256, 
240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 1, 2, 384, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 2, 1, 128, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 2, 1, 256, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 2, 1, 384, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 2, 2, 128, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 2, 2, 256, 240, 240, 156,,,, +5, 7, 2, 6, 2, 2, 2, 2, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 1, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 1, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 1, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 2, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 2, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 2, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 3, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 3, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 1, 3, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 1, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 1, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 1, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 2, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 2, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 2, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 3, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 3, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 2, 3, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 1, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 1, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 1, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 2, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 2, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 2, 384, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 3, 128, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 3, 256, 240, 240, 156,,,, +5, 7, 2, 6, 3, 3, 3, 3, 384, 240, 240, 156,,,, +5, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 181,,,, +5, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 181,,,, +5, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 1, 1, 128, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 1, 1, 256, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 1, 1, 384, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 1, 2, 128, 240, 240, 181,,,, +5, 7, 2, 7, 
2, 2, 1, 2, 256, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 1, 2, 384, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 2, 1, 128, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 2, 1, 256, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 2, 1, 384, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 2, 2, 128, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 2, 2, 256, 240, 240, 181,,,, +5, 7, 2, 7, 2, 2, 2, 2, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 1, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 1, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 1, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 2, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 2, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 2, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 3, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 3, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 1, 3, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 1, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 1, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 1, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 2, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 2, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 2, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 3, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 3, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 2, 3, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 1, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 1, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 1, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 2, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 2, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 2, 384, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 3, 128, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 3, 256, 240, 240, 181,,,, +5, 7, 2, 7, 3, 3, 3, 3, 384, 240, 240, 181,,,, +5, 7, 3, 1, 1, 1, 1, 1, 128, 82, 57, 31,,,, +5, 7, 3, 1, 1, 1, 1, 1, 256, 82, 57, 31,,,, +5, 7, 3, 1, 1, 1, 1, 1, 384, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 1, 1, 128, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 1, 1, 256, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 1, 1, 384, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 1, 2, 128, 82, 57, 31,,,, +5, 7, 3, 1, 2, 
2, 1, 2, 256, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 1, 2, 384, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 2, 1, 128, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 2, 1, 256, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 2, 1, 384, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 2, 2, 128, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 2, 2, 256, 82, 57, 31,,,, +5, 7, 3, 1, 2, 2, 2, 2, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 1, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 1, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 1, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 2, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 2, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 2, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 3, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 3, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 1, 3, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 1, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 1, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 1, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 2, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 2, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 2, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 3, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 3, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 2, 3, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 1, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 1, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 1, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 2, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 2, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 2, 384, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 3, 128, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 3, 256, 82, 57, 31,,,, +5, 7, 3, 1, 3, 3, 3, 3, 384, 82, 57, 31,,,, +5, 7, 3, 2, 1, 1, 1, 1, 128, 158, 108, 56,,,, +5, 7, 3, 2, 1, 1, 1, 1, 256, 158, 108, 56,,,, +5, 7, 3, 2, 1, 1, 1, 1, 384, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 1, 1, 128, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 1, 1, 256, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 1, 1, 384, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 1, 2, 128, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 1, 2, 256, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 1, 2, 384, 158, 108, 56,,,, +5, 7, 3, 2, 2, 
2, 2, 1, 128, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 2, 1, 256, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 2, 1, 384, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 2, 2, 128, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 2, 2, 256, 158, 108, 56,,,, +5, 7, 3, 2, 2, 2, 2, 2, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 1, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 1, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 1, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 2, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 2, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 2, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 3, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 3, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 1, 3, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 1, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 1, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 1, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 2, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 2, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 2, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 3, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 3, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 2, 3, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 1, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 1, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 1, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 2, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 2, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 2, 384, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 3, 128, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 3, 256, 158, 108, 56,,,, +5, 7, 3, 2, 3, 3, 3, 3, 384, 158, 108, 56,,,, +5, 7, 3, 3, 1, 1, 1, 1, 128, 234, 159, 81,,,, +5, 7, 3, 3, 1, 1, 1, 1, 256, 234, 159, 81,,,, +5, 7, 3, 3, 1, 1, 1, 1, 384, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 1, 1, 128, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 1, 1, 256, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 1, 1, 384, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 1, 2, 128, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 1, 2, 256, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 1, 2, 384, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 2, 1, 128, 234, 159, 
81,,,, +5, 7, 3, 3, 2, 2, 2, 1, 256, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 2, 1, 384, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 2, 2, 128, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 2, 2, 256, 234, 159, 81,,,, +5, 7, 3, 3, 2, 2, 2, 2, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 1, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 1, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 1, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 2, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 2, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 2, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 3, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 3, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 1, 3, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 1, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 1, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 1, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 2, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 2, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 2, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 3, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 3, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 2, 3, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 1, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 1, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 1, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 2, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 2, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 2, 384, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 3, 128, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 3, 256, 234, 159, 81,,,, +5, 7, 3, 3, 3, 3, 3, 3, 384, 234, 159, 81,,,, +5, 7, 3, 4, 1, 1, 1, 1, 128, 240, 210, 106,,,, +5, 7, 3, 4, 1, 1, 1, 1, 256, 240, 210, 106,,,, +5, 7, 3, 4, 1, 1, 1, 1, 384, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 1, 1, 128, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 1, 1, 256, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 1, 1, 384, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 1, 2, 128, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 1, 2, 256, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 1, 2, 384, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 2, 1, 128, 240, 210, 106,,,, +5, 7, 
3, 4, 2, 2, 2, 1, 256, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 2, 1, 384, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 2, 2, 128, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 2, 2, 256, 240, 210, 106,,,, +5, 7, 3, 4, 2, 2, 2, 2, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 1, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 1, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 1, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 2, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 2, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 2, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 3, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 3, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 1, 3, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 1, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 1, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 1, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 2, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 2, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 2, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 3, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 3, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 2, 3, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 1, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 1, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 1, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 2, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 2, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 2, 384, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 3, 128, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 3, 256, 240, 210, 106,,,, +5, 7, 3, 4, 3, 3, 3, 3, 384, 240, 210, 106,,,, +5, 7, 3, 5, 1, 1, 1, 1, 128, 240, 240, 131,,,, +5, 7, 3, 5, 1, 1, 1, 1, 256, 240, 240, 131,,,, +5, 7, 3, 5, 1, 1, 1, 1, 384, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 1, 1, 128, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 1, 1, 256, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 1, 1, 384, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 1, 2, 128, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 1, 2, 256, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 1, 2, 384, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 2, 1, 128, 240, 
240, 131,,,, +5, 7, 3, 5, 2, 2, 2, 1, 256, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 2, 1, 384, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 2, 2, 128, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 2, 2, 256, 240, 240, 131,,,, +5, 7, 3, 5, 2, 2, 2, 2, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 1, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 1, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 1, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 2, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 2, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 2, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 3, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 3, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 1, 3, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 1, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 1, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 1, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 2, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 2, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 2, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 3, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 3, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 2, 3, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 1, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 1, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 1, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 2, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 2, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 2, 384, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 3, 128, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 3, 256, 240, 240, 131,,,, +5, 7, 3, 5, 3, 3, 3, 3, 384, 240, 240, 131,,,, +5, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 156,,,, +5, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 156,,,, +5, 7, 3, 6, 1, 1, 1, 1, 384, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 1, 1, 128, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 1, 1, 256, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 1, 1, 384, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 1, 2, 128, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 1, 2, 256, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 1, 2, 384, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 
2, 1, 128, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 2, 1, 256, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 2, 1, 384, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 2, 2, 128, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 2, 2, 256, 240, 240, 156,,,, +5, 7, 3, 6, 2, 2, 2, 2, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 1, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 1, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 1, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 2, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 2, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 2, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 3, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 3, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 1, 3, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 1, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 1, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 1, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 2, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 2, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 2, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 3, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 3, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 2, 3, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 1, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 1, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 1, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 2, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 2, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 2, 384, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 3, 128, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 3, 256, 240, 240, 156,,,, +5, 7, 3, 6, 3, 3, 3, 3, 384, 240, 240, 156,,,, +5, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 181,,,, +5, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 181,,,, +5, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 1, 1, 128, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 1, 1, 256, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 1, 1, 384, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 1, 2, 128, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 1, 2, 256, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 1, 2, 384, 240, 240, 181,,,, 
+5, 7, 3, 7, 2, 2, 2, 1, 128, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 2, 1, 256, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 2, 1, 384, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 2, 2, 128, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 2, 2, 256, 240, 240, 181,,,, +5, 7, 3, 7, 2, 2, 2, 2, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 1, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 1, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 1, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 2, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 2, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 2, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 3, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 3, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 1, 3, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 1, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 1, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 1, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 2, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 2, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 2, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 3, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 3, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 2, 3, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 1, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 1, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 1, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 2, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 2, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 2, 384, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 3, 128, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 3, 256, 240, 240, 181,,,, +5, 7, 3, 7, 3, 3, 3, 3, 384, 240, 240, 181,,,, +5, 7, 4, 1, 1, 1, 1, 1, 128, 82, 57, 31,,,, +5, 7, 4, 1, 1, 1, 1, 1, 256, 82, 57, 31,,,, +5, 7, 4, 1, 1, 1, 1, 1, 384, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 1, 1, 128, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 1, 1, 256, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 1, 1, 384, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 1, 2, 128, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 1, 2, 256, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 1, 2, 384, 82, 57, 31,,,, +5, 7, 4, 
1, 2, 2, 2, 1, 128, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 2, 1, 256, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 2, 1, 384, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 2, 2, 128, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 2, 2, 256, 82, 57, 31,,,, +5, 7, 4, 1, 2, 2, 2, 2, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 1, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 1, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 1, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 2, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 2, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 2, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 3, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 3, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 1, 3, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 1, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 1, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 1, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 2, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 2, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 2, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 3, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 3, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 2, 3, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 1, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 1, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 1, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 2, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 2, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 2, 384, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 3, 128, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 3, 256, 82, 57, 31,,,, +5, 7, 4, 1, 3, 3, 3, 3, 384, 82, 57, 31,,,, +5, 7, 4, 2, 1, 1, 1, 1, 128, 158, 108, 56,,,, +5, 7, 4, 2, 1, 1, 1, 1, 256, 158, 108, 56,,,, +5, 7, 4, 2, 1, 1, 1, 1, 384, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 1, 1, 128, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 1, 1, 256, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 1, 1, 384, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 1, 2, 128, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 1, 2, 256, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 1, 2, 384, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 2, 1, 128, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 2, 1, 256, 158, 108, 56,,,, +5, 7, 
4, 2, 2, 2, 2, 1, 384, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 2, 2, 128, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 2, 2, 256, 158, 108, 56,,,, +5, 7, 4, 2, 2, 2, 2, 2, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 1, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 1, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 1, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 2, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 2, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 2, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 3, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 3, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 1, 3, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 1, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 1, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 1, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 2, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 2, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 2, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 3, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 3, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 2, 3, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 1, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 1, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 1, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 2, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 2, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 2, 384, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 3, 128, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 3, 256, 158, 108, 56,,,, +5, 7, 4, 2, 3, 3, 3, 3, 384, 158, 108, 56,,,, +5, 7, 4, 3, 1, 1, 1, 1, 128, 234, 159, 81,,,, +5, 7, 4, 3, 1, 1, 1, 1, 256, 234, 159, 81,,,, +5, 7, 4, 3, 1, 1, 1, 1, 384, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 1, 1, 128, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 1, 1, 256, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 1, 1, 384, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 1, 2, 128, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 1, 2, 256, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 1, 2, 384, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 2, 1, 128, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 2, 1, 256, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 2, 1, 384, 
234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 2, 2, 128, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 2, 2, 256, 234, 159, 81,,,, +5, 7, 4, 3, 2, 2, 2, 2, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 1, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 1, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 1, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 2, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 2, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 2, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 3, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 3, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 1, 3, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 1, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 1, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 1, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 2, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 2, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 2, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 3, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 3, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 2, 3, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 1, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 1, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 1, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 2, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 2, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 2, 384, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 3, 128, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 3, 256, 234, 159, 81,,,, +5, 7, 4, 3, 3, 3, 3, 3, 384, 234, 159, 81,,,, +5, 7, 4, 4, 1, 1, 1, 1, 128, 240, 210, 106,,,, +5, 7, 4, 4, 1, 1, 1, 1, 256, 240, 210, 106,,,, +5, 7, 4, 4, 1, 1, 1, 1, 384, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 1, 1, 128, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 1, 1, 256, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 1, 1, 384, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 1, 2, 128, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 1, 2, 256, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 1, 2, 384, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 2, 1, 128, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 2, 1, 256, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 2, 1, 384, 240, 210, 
106,,,, +5, 7, 4, 4, 2, 2, 2, 2, 128, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 2, 2, 256, 240, 210, 106,,,, +5, 7, 4, 4, 2, 2, 2, 2, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 1, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 1, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 1, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 2, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 2, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 2, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 3, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 3, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 1, 3, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 1, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 1, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 1, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 2, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 2, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 2, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 3, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 3, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 2, 3, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 1, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 1, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 1, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 2, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 2, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 2, 384, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 3, 128, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 3, 256, 240, 210, 106,,,, +5, 7, 4, 4, 3, 3, 3, 3, 384, 240, 210, 106,,,, +5, 7, 4, 5, 1, 1, 1, 1, 128, 240, 240, 131,,,, +5, 7, 4, 5, 1, 1, 1, 1, 256, 240, 240, 131,,,, +5, 7, 4, 5, 1, 1, 1, 1, 384, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 1, 1, 128, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 1, 1, 256, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 1, 1, 384, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 1, 2, 128, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 1, 2, 256, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 1, 2, 384, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 2, 1, 128, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 2, 1, 256, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 2, 
1, 384, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 2, 2, 128, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 2, 2, 256, 240, 240, 131,,,, +5, 7, 4, 5, 2, 2, 2, 2, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 1, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 1, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 1, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 2, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 2, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 2, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 3, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 3, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 1, 3, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 1, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 1, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 1, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 2, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 2, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 2, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 3, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 3, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 2, 3, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 1, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 1, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 1, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 2, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 2, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 2, 384, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 3, 128, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 3, 256, 240, 240, 131,,,, +5, 7, 4, 5, 3, 3, 3, 3, 384, 240, 240, 131,,,, +5, 7, 4, 6, 1, 1, 1, 1, 128, 240, 240, 156,,,, +5, 7, 4, 6, 1, 1, 1, 1, 256, 240, 240, 156,,,, +5, 7, 4, 6, 1, 1, 1, 1, 384, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 1, 1, 128, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 1, 1, 256, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 1, 1, 384, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 1, 2, 128, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 1, 2, 256, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 1, 2, 384, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 2, 1, 128, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 2, 1, 256, 240, 240, 156,,,, +5, 
7, 4, 6, 2, 2, 2, 1, 384, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 2, 2, 128, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 2, 2, 256, 240, 240, 156,,,, +5, 7, 4, 6, 2, 2, 2, 2, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 1, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 1, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 1, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 2, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 2, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 2, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 3, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 3, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 1, 3, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 1, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 1, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 1, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 2, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 2, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 2, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 3, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 3, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 2, 3, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 1, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 1, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 1, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 2, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 2, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 2, 384, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 3, 128, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 3, 256, 240, 240, 156,,,, +5, 7, 4, 6, 3, 3, 3, 3, 384, 240, 240, 156,,,, +5, 7, 4, 7, 1, 1, 1, 1, 128, 240, 240, 181,,,, +5, 7, 4, 7, 1, 1, 1, 1, 256, 240, 240, 181,,,, +5, 7, 4, 7, 1, 1, 1, 1, 384, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 1, 1, 128, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 1, 1, 256, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 1, 1, 384, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 1, 2, 128, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 1, 2, 256, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 1, 2, 384, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 2, 1, 128, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 2, 1, 256, 240, 
240, 181,,,, +5, 7, 4, 7, 2, 2, 2, 1, 384, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 2, 2, 128, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 2, 2, 256, 240, 240, 181,,,, +5, 7, 4, 7, 2, 2, 2, 2, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 1, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 1, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 1, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 2, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 2, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 2, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 3, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 3, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 1, 3, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 1, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 1, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 1, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 2, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 2, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 2, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 3, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 3, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 2, 3, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 1, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 1, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 1, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 2, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 2, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 2, 384, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 3, 128, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 3, 256, 240, 240, 181,,,, +5, 7, 4, 7, 3, 3, 3, 3, 384, 240, 240, 181,,,, +5, 7, 5, 1, 1, 1, 1, 1, 128, 82, 57, 31,,,, +5, 7, 5, 1, 1, 1, 1, 1, 256, 82, 57, 31,,,, +5, 7, 5, 1, 1, 1, 1, 1, 384, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 1, 1, 128, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 1, 1, 256, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 1, 1, 384, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 1, 2, 128, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 1, 2, 256, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 1, 2, 384, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 2, 1, 128, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 2, 1, 256, 82, 57, 31,,,, +5, 
7, 5, 1, 2, 2, 2, 1, 384, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 2, 2, 128, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 2, 2, 256, 82, 57, 31,,,, +5, 7, 5, 1, 2, 2, 2, 2, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 1, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 1, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 1, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 2, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 2, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 2, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 3, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 3, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 1, 3, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 1, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 1, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 1, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 2, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 2, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 2, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 3, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 3, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 2, 3, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 1, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 1, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 1, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 2, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 2, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 2, 384, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 3, 128, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 3, 256, 82, 57, 31,,,, +5, 7, 5, 1, 3, 3, 3, 3, 384, 82, 57, 31,,,, +5, 7, 5, 2, 1, 1, 1, 1, 128, 158, 108, 56,,,, +5, 7, 5, 2, 1, 1, 1, 1, 256, 158, 108, 56,,,, +5, 7, 5, 2, 1, 1, 1, 1, 384, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 1, 1, 128, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 1, 1, 256, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 1, 1, 384, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 1, 2, 128, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 1, 2, 256, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 1, 2, 384, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 2, 1, 128, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 2, 1, 256, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 2, 1, 384, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 2, 2, 128, 158, 108, 
56,,,, +5, 7, 5, 2, 2, 2, 2, 2, 256, 158, 108, 56,,,, +5, 7, 5, 2, 2, 2, 2, 2, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 1, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 1, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 1, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 2, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 2, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 2, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 3, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 3, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 1, 3, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 1, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 1, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 1, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 2, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 2, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 2, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 3, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 3, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 2, 3, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 1, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 1, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 1, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 2, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 2, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 2, 384, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 3, 128, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 3, 256, 158, 108, 56,,,, +5, 7, 5, 2, 3, 3, 3, 3, 384, 158, 108, 56,,,, +5, 7, 5, 3, 1, 1, 1, 1, 128, 234, 159, 81,,,, +5, 7, 5, 3, 1, 1, 1, 1, 256, 234, 159, 81,,,, +5, 7, 5, 3, 1, 1, 1, 1, 384, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 1, 1, 128, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 1, 1, 256, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 1, 1, 384, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 1, 2, 128, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 1, 2, 256, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 1, 2, 384, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 2, 1, 128, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 2, 1, 256, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 2, 1, 384, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 2, 2, 128, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 
2, 2, 256, 234, 159, 81,,,, +5, 7, 5, 3, 2, 2, 2, 2, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 1, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 1, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 1, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 2, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 2, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 2, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 3, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 3, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 1, 3, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 1, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 1, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 1, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 2, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 2, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 2, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 3, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 3, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 2, 3, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 1, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 1, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 1, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 2, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 2, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 2, 384, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 3, 128, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 3, 256, 234, 159, 81,,,, +5, 7, 5, 3, 3, 3, 3, 3, 384, 234, 159, 81,,,, +5, 7, 5, 4, 1, 1, 1, 1, 128, 240, 210, 106,,,, +5, 7, 5, 4, 1, 1, 1, 1, 256, 240, 210, 106,,,, +5, 7, 5, 4, 1, 1, 1, 1, 384, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 1, 1, 128, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 1, 1, 256, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 1, 1, 384, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 1, 2, 128, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 1, 2, 256, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 1, 2, 384, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 2, 1, 128, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 2, 1, 256, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 2, 1, 384, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 2, 2, 128, 240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 2, 2, 256, 
240, 210, 106,,,, +5, 7, 5, 4, 2, 2, 2, 2, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 1, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 1, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 1, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 2, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 2, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 2, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 3, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 3, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 1, 3, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 1, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 1, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 1, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 2, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 2, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 2, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 3, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 3, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 2, 3, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 1, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 1, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 1, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 2, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 2, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 2, 384, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 3, 128, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 3, 256, 240, 210, 106,,,, +5, 7, 5, 4, 3, 3, 3, 3, 384, 240, 210, 106,,,, +5, 7, 5, 5, 1, 1, 1, 1, 128, 240, 240, 131,,,, +5, 7, 5, 5, 1, 1, 1, 1, 256, 240, 240, 131,,,, +5, 7, 5, 5, 1, 1, 1, 1, 384, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 1, 1, 128, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 1, 1, 256, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 1, 1, 384, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 1, 2, 128, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 1, 2, 256, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 1, 2, 384, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 2, 1, 128, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 2, 1, 256, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 2, 1, 384, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 2, 2, 128, 240, 240, 131,,,, +5, 7, 5, 5, 
2, 2, 2, 2, 256, 240, 240, 131,,,, +5, 7, 5, 5, 2, 2, 2, 2, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 1, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 1, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 1, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 2, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 2, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 2, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 3, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 3, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 1, 3, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 1, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 1, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 1, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 2, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 2, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 2, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 3, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 3, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 2, 3, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 1, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 1, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 1, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 2, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 2, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 2, 384, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 3, 128, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 3, 256, 240, 240, 131,,,, +5, 7, 5, 5, 3, 3, 3, 3, 384, 240, 240, 131,,,, +5, 7, 5, 6, 1, 1, 1, 1, 128, 240, 240, 156,,,, +5, 7, 5, 6, 1, 1, 1, 1, 256, 240, 240, 156,,,, +5, 7, 5, 6, 1, 1, 1, 1, 384, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 1, 1, 128, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 1, 1, 256, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 1, 1, 384, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 1, 2, 128, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 1, 2, 256, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 1, 2, 384, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 2, 1, 128, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 2, 1, 256, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 2, 1, 384, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 2, 2, 128, 240, 240, 
156,,,, +5, 7, 5, 6, 2, 2, 2, 2, 256, 240, 240, 156,,,, +5, 7, 5, 6, 2, 2, 2, 2, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 1, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 1, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 1, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 2, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 2, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 2, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 3, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 3, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 1, 3, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 1, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 1, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 1, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 2, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 2, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 2, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 3, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 3, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 2, 3, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 1, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 1, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 1, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 2, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 2, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 2, 384, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 3, 128, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 3, 256, 240, 240, 156,,,, +5, 7, 5, 6, 3, 3, 3, 3, 384, 240, 240, 156,,,, +5, 7, 5, 7, 1, 1, 1, 1, 128, 240, 240, 181,,,, +5, 7, 5, 7, 1, 1, 1, 1, 256, 240, 240, 181,,,, +5, 7, 5, 7, 1, 1, 1, 1, 384, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 1, 1, 128, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 1, 1, 256, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 1, 1, 384, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 1, 2, 128, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 1, 2, 256, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 1, 2, 384, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 2, 1, 128, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 2, 1, 256, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 2, 1, 384, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 2, 
2, 128, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 2, 2, 256, 240, 240, 181,,,, +5, 7, 5, 7, 2, 2, 2, 2, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 1, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 1, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 1, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 2, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 2, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 2, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 3, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 3, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 1, 3, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 1, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 1, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 1, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 2, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 2, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 2, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 3, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 3, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 2, 3, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 1, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 1, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 1, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 2, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 2, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 2, 384, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 3, 128, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 3, 256, 240, 240, 181,,,, +5, 7, 5, 7, 3, 3, 3, 3, 384, 240, 240, 181,,,, +6, 1, 1, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 42, 21, +6, 1, 1, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 42, 21, +6, 1, 1, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 42, 21, +6, 1, 2, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 42, 21, +6, 1, 2, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 42, 21, +6, 1, 2, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 42, 21, +6, 1, 3, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 42, 21, +6, 1, 3, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 42, 21, +6, 1, 3, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 42, 21, +6, 1, 4, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 42, 21, +6, 1, 4, 1, 1, 1, 1, 1, 256, 64, 64, 64, 
64, 42, 21, +6, 1, 4, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 42, 21, +6, 1, 5, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 42, 21, +6, 1, 5, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 42, 21, +6, 1, 5, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 42, 21, +6, 1, 6, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 42, 21, +6, 1, 6, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 42, 21, +6, 1, 6, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 42, 21, +6, 2, 1, 1, 1, 1, 1, 1, 128, 65, 65, 65, 43,,, +6, 2, 1, 1, 1, 1, 1, 1, 256, 65, 65, 65, 43,,, +6, 2, 1, 1, 1, 1, 1, 1, 384, 65, 65, 65, 43,,, +6, 2, 1, 1, 2, 2, 1, 1, 128, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 1, 1, 256, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 1, 1, 384, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 1, 2, 128, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 1, 2, 256, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 1, 2, 384, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 2, 1, 128, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 2, 1, 256, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 2, 1, 384, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 2, 2, 128, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 2, 2, 256, 65, 65, 64, 43,,, +6, 2, 1, 1, 2, 2, 2, 2, 384, 65, 65, 64, 43,,, +6, 2, 1, 2, 1, 1, 1, 1, 128, 129, 129, 129, 85,,, +6, 2, 1, 2, 1, 1, 1, 1, 256, 129, 129, 129, 85,,, +6, 2, 1, 2, 1, 1, 1, 1, 384, 129, 129, 129, 85,,, +6, 2, 1, 2, 2, 2, 1, 1, 128, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 1, 1, 256, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 1, 1, 384, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 1, 2, 128, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 1, 2, 256, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 1, 2, 384, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 2, 1, 128, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 2, 1, 256, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 2, 1, 384, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 2, 2, 128, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 2, 2, 256, 129, 129, 127, 85,,, +6, 2, 1, 2, 2, 2, 2, 2, 384, 129, 129, 127, 85,,, +6, 2, 2, 1, 1, 1, 1, 1, 128, 65, 65, 65, 43,,, +6, 2, 2, 1, 1, 1, 1, 1, 256, 65, 65, 65, 43,,, +6, 2, 2, 1, 1, 1, 1, 1, 
384, 65, 65, 65, 43,,, +6, 2, 2, 1, 2, 2, 1, 1, 128, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 1, 1, 256, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 1, 1, 384, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 1, 2, 128, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 1, 2, 256, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 1, 2, 384, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 2, 1, 128, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 2, 1, 256, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 2, 1, 384, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 2, 2, 128, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 2, 2, 256, 65, 65, 64, 43,,, +6, 2, 2, 1, 2, 2, 2, 2, 384, 65, 65, 64, 43,,, +6, 2, 2, 2, 1, 1, 1, 1, 128, 129, 129, 129, 85,,, +6, 2, 2, 2, 1, 1, 1, 1, 256, 129, 129, 129, 85,,, +6, 2, 2, 2, 1, 1, 1, 1, 384, 129, 129, 129, 85,,, +6, 2, 2, 2, 2, 2, 1, 1, 128, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 1, 1, 256, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 1, 1, 384, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 1, 2, 128, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 1, 2, 256, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 1, 2, 384, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 2, 1, 128, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 2, 1, 256, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 2, 1, 384, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 2, 2, 128, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 2, 2, 256, 129, 129, 127, 85,,, +6, 2, 2, 2, 2, 2, 2, 2, 384, 129, 129, 127, 85,,, +6, 2, 3, 1, 1, 1, 1, 1, 128, 65, 65, 65, 43,,, +6, 2, 3, 1, 1, 1, 1, 1, 256, 65, 65, 65, 43,,, +6, 2, 3, 1, 1, 1, 1, 1, 384, 65, 65, 65, 43,,, +6, 2, 3, 1, 2, 2, 1, 1, 128, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 1, 1, 256, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 1, 1, 384, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 1, 2, 128, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 1, 2, 256, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 1, 2, 384, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 2, 1, 128, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 2, 1, 256, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 2, 1, 384, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 2, 2, 128, 65, 65, 64, 43,,, +6, 2, 3, 
1, 2, 2, 2, 2, 256, 65, 65, 64, 43,,, +6, 2, 3, 1, 2, 2, 2, 2, 384, 65, 65, 64, 43,,, +6, 2, 3, 2, 1, 1, 1, 1, 128, 129, 129, 129, 85,,, +6, 2, 3, 2, 1, 1, 1, 1, 256, 129, 129, 129, 85,,, +6, 2, 3, 2, 1, 1, 1, 1, 384, 129, 129, 129, 85,,, +6, 2, 3, 2, 2, 2, 1, 1, 128, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 1, 1, 256, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 1, 1, 384, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 1, 2, 128, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 1, 2, 256, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 1, 2, 384, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 2, 1, 128, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 2, 1, 256, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 2, 1, 384, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 2, 2, 128, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 2, 2, 256, 129, 129, 127, 85,,, +6, 2, 3, 2, 2, 2, 2, 2, 384, 129, 129, 127, 85,,, +6, 2, 4, 1, 1, 1, 1, 1, 128, 65, 65, 65, 43,,, +6, 2, 4, 1, 1, 1, 1, 1, 256, 65, 65, 65, 43,,, +6, 2, 4, 1, 1, 1, 1, 1, 384, 65, 65, 65, 43,,, +6, 2, 4, 1, 2, 2, 1, 1, 128, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 1, 1, 256, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 1, 1, 384, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 1, 2, 128, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 1, 2, 256, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 1, 2, 384, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 2, 1, 128, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 2, 1, 256, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 2, 1, 384, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 2, 2, 128, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 2, 2, 256, 65, 65, 64, 43,,, +6, 2, 4, 1, 2, 2, 2, 2, 384, 65, 65, 64, 43,,, +6, 2, 4, 2, 1, 1, 1, 1, 128, 129, 129, 129, 85,,, +6, 2, 4, 2, 1, 1, 1, 1, 256, 129, 129, 129, 85,,, +6, 2, 4, 2, 1, 1, 1, 1, 384, 129, 129, 129, 85,,, +6, 2, 4, 2, 2, 2, 1, 1, 128, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 1, 1, 256, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 1, 1, 384, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 1, 2, 128, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 1, 2, 256, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 
1, 2, 384, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 2, 1, 128, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 2, 1, 256, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 2, 1, 384, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 2, 2, 128, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 2, 2, 256, 129, 129, 127, 85,,, +6, 2, 4, 2, 2, 2, 2, 2, 384, 129, 129, 127, 85,,, +6, 2, 5, 1, 1, 1, 1, 1, 128, 65, 65, 65, 43,,, +6, 2, 5, 1, 1, 1, 1, 1, 256, 65, 65, 65, 43,,, +6, 2, 5, 1, 1, 1, 1, 1, 384, 65, 65, 65, 43,,, +6, 2, 5, 1, 2, 2, 1, 1, 128, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 1, 1, 256, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 1, 1, 384, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 1, 2, 128, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 1, 2, 256, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 1, 2, 384, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 2, 1, 128, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 2, 1, 256, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 2, 1, 384, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 2, 2, 128, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 2, 2, 256, 65, 65, 64, 43,,, +6, 2, 5, 1, 2, 2, 2, 2, 384, 65, 65, 64, 43,,, +6, 2, 5, 2, 1, 1, 1, 1, 128, 129, 129, 129, 85,,, +6, 2, 5, 2, 1, 1, 1, 1, 256, 129, 129, 129, 85,,, +6, 2, 5, 2, 1, 1, 1, 1, 384, 129, 129, 129, 85,,, +6, 2, 5, 2, 2, 2, 1, 1, 128, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 1, 1, 256, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 1, 1, 384, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 1, 2, 128, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 1, 2, 256, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 1, 2, 384, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 2, 1, 128, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 2, 1, 256, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 2, 1, 384, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 2, 2, 128, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 2, 2, 256, 129, 129, 127, 85,,, +6, 2, 5, 2, 2, 2, 2, 2, 384, 129, 129, 127, 85,,, +6, 2, 6, 1, 1, 1, 1, 1, 128, 65, 65, 65, 43,,, +6, 2, 6, 1, 1, 1, 1, 1, 256, 65, 65, 65, 43,,, +6, 2, 6, 1, 1, 1, 1, 1, 384, 65, 65, 65, 43,,, +6, 2, 6, 1, 2, 2, 1, 1, 128, 
65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 1, 1, 256, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 1, 1, 384, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 1, 2, 128, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 1, 2, 256, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 1, 2, 384, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 2, 1, 128, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 2, 1, 256, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 2, 1, 384, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 2, 2, 128, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 2, 2, 256, 65, 65, 64, 43,,, +6, 2, 6, 1, 2, 2, 2, 2, 384, 65, 65, 64, 43,,, +6, 2, 6, 2, 1, 1, 1, 1, 128, 129, 129, 129, 85,,, +6, 2, 6, 2, 1, 1, 1, 1, 256, 129, 129, 129, 85,,, +6, 2, 6, 2, 1, 1, 1, 1, 384, 129, 129, 129, 85,,, +6, 2, 6, 2, 2, 2, 1, 1, 128, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 1, 1, 256, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 1, 1, 384, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 1, 2, 128, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 1, 2, 256, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 1, 2, 384, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 2, 1, 128, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 2, 1, 256, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 2, 1, 384, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 2, 2, 128, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 2, 2, 256, 129, 129, 127, 85,,, +6, 2, 6, 2, 2, 2, 2, 2, 384, 129, 129, 127, 85,,, +6, 3, 1, 1, 1, 1, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 1, 1, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 1, 1, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 2, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 2, 
2, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 3, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 3, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 1, 3, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 3, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 3, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 2, 3, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 1, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 1, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 1, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 2, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 2, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 2, 384, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 3, 128, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 3, 256, 66, 66, 44, 23,,, +6, 3, 1, 1, 3, 3, 3, 3, 384, 66, 66, 44, 23,,, +6, 3, 1, 2, 1, 1, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 1, 1, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 1, 1, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 2, 
2, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 2, 2, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 3, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 3, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 1, 3, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 3, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 3, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 2, 3, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 1, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 1, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 1, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 2, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 2, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 2, 384, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 3, 128, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 3, 256, 130, 130, 86, 44,,, +6, 3, 1, 2, 3, 3, 3, 3, 384, 130, 130, 86, 44,,, +6, 3, 1, 3, 1, 1, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 1, 1, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 1, 1, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 1, 
3, 2, 2, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 2, 2, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 3, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 3, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 1, 3, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 3, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 3, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 2, 3, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 1, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 1, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 1, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 2, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 2, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 2, 384, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 3, 128, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 3, 256, 194, 194, 128, 65,,, +6, 3, 1, 3, 3, 3, 3, 3, 384, 194, 194, 128, 65,,, +6, 3, 2, 1, 1, 1, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 1, 1, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 1, 1, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 1, 2, 384, 66, 66, 44, 
23,,, +6, 3, 2, 1, 2, 2, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 2, 2, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 3, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 3, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 1, 3, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 3, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 3, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 2, 3, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 1, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 1, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 1, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 2, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 2, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 2, 384, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 3, 128, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 3, 256, 66, 66, 44, 23,,, +6, 3, 2, 1, 3, 3, 3, 3, 384, 66, 66, 44, 23,,, +6, 3, 2, 2, 1, 1, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 1, 1, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 1, 1, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 2, 
2, 2, 2, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 2, 2, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 3, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 3, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 1, 3, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 3, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 3, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 2, 3, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 1, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 1, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 1, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 2, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 2, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 2, 384, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 3, 128, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 3, 256, 130, 130, 86, 44,,, +6, 3, 2, 2, 3, 3, 3, 3, 384, 130, 130, 86, 44,,, +6, 3, 2, 3, 1, 1, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 1, 1, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 1, 1, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 1, 1, 384, 194, 194, 128, 65,,, +6, 
3, 2, 3, 2, 2, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 2, 2, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 3, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 3, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 1, 3, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 3, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 3, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 2, 3, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 1, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 1, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 1, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 2, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 2, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 2, 384, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 3, 128, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 3, 256, 194, 194, 128, 65,,, +6, 3, 2, 3, 3, 3, 3, 3, 384, 194, 194, 128, 65,,, +6, 3, 3, 1, 1, 1, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 1, 1, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 1, 1, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 1, 
1, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 2, 2, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 3, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 3, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 1, 3, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 3, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 3, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 2, 3, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 1, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 1, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 1, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 2, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 2, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 2, 384, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 3, 128, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 3, 256, 66, 66, 44, 23,,, +6, 3, 3, 1, 3, 3, 3, 3, 384, 66, 66, 44, 23,,, +6, 3, 3, 2, 1, 1, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 1, 1, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 1, 1, 1, 1, 384, 130, 130, 86, 44,,, 
+6, 3, 3, 2, 2, 2, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 2, 2, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 3, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 3, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 1, 3, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 3, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 3, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 2, 3, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 1, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 1, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 1, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 2, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 2, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 2, 384, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 3, 128, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 3, 256, 130, 130, 86, 44,,, +6, 3, 3, 2, 3, 3, 3, 3, 384, 130, 130, 86, 44,,, +6, 3, 3, 3, 1, 1, 1, 1, 128, 194, 194, 128, 
65,,, +6, 3, 3, 3, 1, 1, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 1, 1, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 2, 2, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 3, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 3, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 1, 3, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 3, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 3, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 2, 3, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 3, 1, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 3, 1, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 3, 1, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 3, 2, 128, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 3, 2, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 3, 2, 384, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 3, 3, 128, 194, 194, 128, 65,,, +6, 
3, 3, 3, 3, 3, 3, 3, 256, 194, 194, 128, 65,,, +6, 3, 3, 3, 3, 3, 3, 3, 384, 194, 194, 128, 65,,, +6, 3, 4, 1, 1, 1, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 1, 1, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 1, 1, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 2, 2, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 3, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 3, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 1, 3, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 3, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 3, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 2, 3, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 1, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 1, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 1, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 2, 128, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 2, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 2, 384, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 3, 128, 
66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 3, 256, 66, 66, 44, 23,,, +6, 3, 4, 1, 3, 3, 3, 3, 384, 66, 66, 44, 23,,, +6, 3, 4, 2, 1, 1, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 1, 1, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 1, 1, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 2, 2, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 3, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 3, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 1, 3, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 3, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 3, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 2, 3, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 1, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 1, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 1, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 2, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 2, 256, 130, 
130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 2, 384, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 3, 128, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 3, 256, 130, 130, 86, 44,,, +6, 3, 4, 2, 3, 3, 3, 3, 384, 130, 130, 86, 44,,, +6, 3, 4, 3, 1, 1, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 1, 1, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 1, 1, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 2, 2, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 3, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 3, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 1, 3, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 3, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 3, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 2, 3, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 3, 1, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 3, 1, 256, 194, 194, 128, 65,,, 
+6, 3, 4, 3, 3, 3, 3, 1, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 3, 2, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 3, 2, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 3, 2, 384, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 3, 3, 128, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 3, 3, 256, 194, 194, 128, 65,,, +6, 3, 4, 3, 3, 3, 3, 3, 384, 194, 194, 128, 65,,, +6, 3, 5, 1, 1, 1, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 1, 1, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 1, 1, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 2, 2, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 3, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 3, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 1, 3, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 3, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 3, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 2, 3, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 3, 1, 128, 66, 66, 44, 23,,, +6, 3, 5, 
1, 3, 3, 3, 1, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 3, 1, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 3, 2, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 3, 2, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 3, 2, 384, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 3, 3, 128, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 3, 3, 256, 66, 66, 44, 23,,, +6, 3, 5, 1, 3, 3, 3, 3, 384, 66, 66, 44, 23,,, +6, 3, 5, 2, 1, 1, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 1, 1, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 1, 1, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 2, 2, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 3, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 3, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 1, 3, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 3, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 3, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 2, 3, 
384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 1, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 1, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 1, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 2, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 2, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 2, 384, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 3, 128, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 3, 256, 130, 130, 86, 44,,, +6, 3, 5, 2, 3, 3, 3, 3, 384, 130, 130, 86, 44,,, +6, 3, 5, 3, 1, 1, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 1, 1, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 1, 1, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 2, 2, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 3, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 3, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 1, 3, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 2, 2, 384, 194, 194, 128, 
65,,, +6, 3, 5, 3, 3, 3, 2, 3, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 2, 3, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 2, 3, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 1, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 1, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 1, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 2, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 2, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 2, 384, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 3, 128, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 3, 256, 194, 194, 128, 65,,, +6, 3, 5, 3, 3, 3, 3, 3, 384, 194, 194, 128, 65,,, +6, 3, 6, 1, 1, 1, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 1, 1, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 1, 1, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 2, 2, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 2, 2, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 1, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 1, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 1, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 2, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 2, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 2, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 3, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 3, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 1, 3, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 2, 1, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 2, 1, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 2, 1, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 2, 2, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 2, 2, 256, 66, 66, 
44, 23,,, +6, 3, 6, 1, 3, 3, 2, 2, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 2, 3, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 2, 3, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 2, 3, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 1, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 1, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 1, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 2, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 2, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 2, 384, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 3, 128, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 3, 256, 66, 66, 44, 23,,, +6, 3, 6, 1, 3, 3, 3, 3, 384, 66, 66, 44, 23,,, +6, 3, 6, 2, 1, 1, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 1, 1, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 1, 1, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 2, 2, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 1, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 1, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 1, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 2, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 2, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 2, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 3, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 3, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 1, 3, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 2, 1, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 2, 1, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 2, 1, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 
3, 2, 2, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 2, 2, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 2, 2, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 2, 3, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 2, 3, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 2, 3, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 1, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 1, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 1, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 2, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 2, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 2, 384, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 3, 128, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 3, 256, 130, 130, 86, 44,,, +6, 3, 6, 2, 3, 3, 3, 3, 384, 130, 130, 86, 44,,, +6, 3, 6, 3, 1, 1, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 1, 1, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 1, 1, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 2, 1, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 2, 2, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 1, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 1, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 1, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 2, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 2, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 2, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 3, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 3, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 1, 3, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 2, 1, 128, 194, 194, 
128, 65,,, +6, 3, 6, 3, 3, 3, 2, 1, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 2, 1, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 2, 2, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 2, 2, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 2, 2, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 2, 3, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 2, 3, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 2, 3, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 1, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 1, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 1, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 2, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 2, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 2, 384, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 3, 128, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 3, 256, 194, 194, 128, 65,,, +6, 3, 6, 3, 3, 3, 3, 3, 384, 194, 194, 128, 65,,, +6, 4, 1, 1, 1, 1, 1, 1, 128, 67, 67, 45, 24,,, +6, 4, 1, 1, 1, 1, 1, 1, 256, 67, 67, 45, 24,,, +6, 4, 1, 1, 1, 1, 1, 1, 384, 67, 67, 45, 24,,, +6, 4, 1, 1, 2, 2, 1, 1, 128, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 1, 1, 256, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 1, 1, 384, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 1, 2, 128, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 1, 2, 256, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 1, 2, 384, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 2, 1, 128, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 2, 1, 256, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 2, 1, 384, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 2, 2, 128, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 2, 2, 256, 67, 66, 45, 22,,, +6, 4, 1, 1, 2, 2, 2, 2, 384, 67, 66, 45, 22,,, +6, 4, 1, 1, 3, 3, 1, 1, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 1, 1, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 1, 1, 384, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 1, 2, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 1, 2, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 1, 2, 384, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 1, 3, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 1, 3, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 
3, 1, 3, 384, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 1, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 1, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 1, 384, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 2, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 2, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 2, 384, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 3, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 3, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 2, 3, 384, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 1, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 1, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 1, 384, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 2, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 2, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 2, 384, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 3, 128, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 3, 256, 67, 64, 45, 21,,, +6, 4, 1, 1, 3, 3, 3, 3, 384, 67, 64, 45, 21,,, +6, 4, 1, 2, 1, 1, 1, 1, 128, 131, 131, 87, 45,,, +6, 4, 1, 2, 1, 1, 1, 1, 256, 131, 131, 87, 45,,, +6, 4, 1, 2, 1, 1, 1, 1, 384, 131, 131, 87, 45,,, +6, 4, 1, 2, 2, 2, 1, 1, 128, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 1, 1, 256, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 1, 1, 384, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 1, 2, 128, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 1, 2, 256, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 1, 2, 384, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 2, 1, 128, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 2, 1, 256, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 2, 1, 384, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 2, 2, 128, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 2, 2, 256, 131, 129, 87, 41,,, +6, 4, 1, 2, 2, 2, 2, 2, 384, 131, 129, 87, 41,,, +6, 4, 1, 2, 3, 3, 1, 1, 128, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 1, 1, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 1, 1, 384, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 1, 2, 128, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 1, 2, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 1, 2, 384, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 1, 3, 128, 131, 125, 87, 39,,, +6, 
4, 1, 2, 3, 3, 1, 3, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 1, 3, 384, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 1, 128, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 1, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 1, 384, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 2, 128, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 2, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 2, 384, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 3, 128, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 3, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 2, 3, 384, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 1, 128, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 1, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 1, 384, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 2, 128, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 2, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 2, 384, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 3, 128, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 3, 256, 131, 125, 87, 39,,, +6, 4, 1, 2, 3, 3, 3, 3, 384, 131, 125, 87, 39,,, +6, 4, 1, 3, 1, 1, 1, 1, 128, 195, 195, 129, 66,,, +6, 4, 1, 3, 1, 1, 1, 1, 256, 195, 195, 129, 66,,, +6, 4, 1, 3, 1, 1, 1, 1, 384, 195, 195, 129, 66,,, +6, 4, 1, 3, 2, 2, 1, 1, 128, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 1, 1, 256, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 1, 1, 384, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 1, 2, 128, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 1, 2, 256, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 1, 2, 384, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 2, 1, 128, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 2, 1, 256, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 2, 1, 384, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 2, 2, 128, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 2, 2, 256, 195, 192, 129, 60,,, +6, 4, 1, 3, 2, 2, 2, 2, 384, 195, 192, 129, 60,,, +6, 4, 1, 3, 3, 3, 1, 1, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 1, 1, 256, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 1, 1, 384, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 1, 2, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 1, 2, 256, 195, 
186, 129, 57,,, +6, 4, 1, 3, 3, 3, 1, 2, 384, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 1, 3, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 1, 3, 256, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 1, 3, 384, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 1, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 1, 256, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 1, 384, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 2, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 2, 256, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 2, 384, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 3, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 3, 256, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 2, 3, 384, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 1, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 1, 256, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 1, 384, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 2, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 2, 256, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 2, 384, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 3, 128, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 3, 256, 195, 186, 129, 57,,, +6, 4, 1, 3, 3, 3, 3, 3, 384, 195, 186, 129, 57,,, +6, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 171, 87,,, +6, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 171, 87,,, +6, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 171, 87,,, +6, 4, 1, 4, 2, 2, 1, 1, 128, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 1, 1, 256, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 1, 1, 384, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 1, 2, 128, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 1, 2, 256, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 1, 2, 384, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 2, 1, 128, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 2, 1, 256, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 2, 1, 384, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 2, 2, 128, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 2, 2, 256, 240, 240, 171, 79,,, +6, 4, 1, 4, 2, 2, 2, 2, 384, 240, 240, 171, 79,,, +6, 4, 1, 4, 3, 3, 1, 1, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 1, 1, 256, 240, 240, 171, 
75,,, +6, 4, 1, 4, 3, 3, 1, 1, 384, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 1, 2, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 1, 2, 256, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 1, 2, 384, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 1, 3, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 1, 3, 256, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 1, 3, 384, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 1, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 1, 256, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 1, 384, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 2, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 2, 256, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 2, 384, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 3, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 3, 256, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 2, 3, 384, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 1, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 1, 256, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 1, 384, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 2, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 2, 256, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 2, 384, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 3, 128, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 3, 256, 240, 240, 171, 75,,, +6, 4, 1, 4, 3, 3, 3, 3, 384, 240, 240, 171, 75,,, +6, 4, 2, 1, 1, 1, 1, 1, 128, 67, 67, 45, 24,,, +6, 4, 2, 1, 1, 1, 1, 1, 256, 67, 67, 45, 24,,, +6, 4, 2, 1, 1, 1, 1, 1, 384, 67, 67, 45, 24,,, +6, 4, 2, 1, 2, 2, 1, 1, 128, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 1, 1, 256, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 1, 1, 384, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 1, 2, 128, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 1, 2, 256, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 1, 2, 384, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 2, 1, 128, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 2, 1, 256, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 2, 1, 384, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 2, 2, 128, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 2, 2, 256, 67, 66, 45, 22,,, +6, 4, 2, 1, 2, 2, 2, 2, 384, 67, 66, 45, 
22,,, +6, 4, 2, 1, 3, 3, 1, 1, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 1, 1, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 1, 1, 384, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 1, 2, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 1, 2, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 1, 2, 384, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 1, 3, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 1, 3, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 1, 3, 384, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 1, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 1, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 1, 384, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 2, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 2, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 2, 384, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 3, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 3, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 2, 3, 384, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 1, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 1, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 1, 384, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 2, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 2, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 2, 384, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 3, 128, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 3, 256, 67, 64, 45, 21,,, +6, 4, 2, 1, 3, 3, 3, 3, 384, 67, 64, 45, 21,,, +6, 4, 2, 2, 1, 1, 1, 1, 128, 131, 131, 87, 45,,, +6, 4, 2, 2, 1, 1, 1, 1, 256, 131, 131, 87, 45,,, +6, 4, 2, 2, 1, 1, 1, 1, 384, 131, 131, 87, 45,,, +6, 4, 2, 2, 2, 2, 1, 1, 128, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 1, 1, 256, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 1, 1, 384, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 1, 2, 128, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 1, 2, 256, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 1, 2, 384, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 2, 1, 128, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 2, 1, 256, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 2, 1, 384, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 2, 2, 128, 131, 129, 87, 41,,, +6, 4, 2, 2, 2, 2, 2, 2, 256, 131, 129, 87, 
41,,, +6, 4, 2, 2, 2, 2, 2, 2, 384, 131, 129, 87, 41,,, +6, 4, 2, 2, 3, 3, 1, 1, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 1, 1, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 1, 1, 384, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 1, 2, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 1, 2, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 1, 2, 384, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 1, 3, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 1, 3, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 1, 3, 384, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 1, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 1, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 1, 384, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 2, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 2, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 2, 384, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 3, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 3, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 2, 3, 384, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 1, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 1, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 1, 384, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 2, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 2, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 2, 384, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 3, 128, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 3, 256, 131, 125, 87, 39,,, +6, 4, 2, 2, 3, 3, 3, 3, 384, 131, 125, 87, 39,,, +6, 4, 2, 3, 1, 1, 1, 1, 128, 195, 195, 129, 66,,, +6, 4, 2, 3, 1, 1, 1, 1, 256, 195, 195, 129, 66,,, +6, 4, 2, 3, 1, 1, 1, 1, 384, 195, 195, 129, 66,,, +6, 4, 2, 3, 2, 2, 1, 1, 128, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 1, 1, 256, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 1, 1, 384, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 1, 2, 128, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 1, 2, 256, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 1, 2, 384, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 2, 1, 128, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 2, 1, 256, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 2, 1, 384, 
195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 2, 2, 128, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 2, 2, 256, 195, 192, 129, 60,,, +6, 4, 2, 3, 2, 2, 2, 2, 384, 195, 192, 129, 60,,, +6, 4, 2, 3, 3, 3, 1, 1, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 1, 1, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 1, 1, 384, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 1, 2, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 1, 2, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 1, 2, 384, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 1, 3, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 1, 3, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 1, 3, 384, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 1, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 1, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 1, 384, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 2, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 2, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 2, 384, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 3, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 3, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 2, 3, 384, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 1, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 1, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 1, 384, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 2, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 2, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 2, 384, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 3, 128, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 3, 256, 195, 186, 129, 57,,, +6, 4, 2, 3, 3, 3, 3, 3, 384, 195, 186, 129, 57,,, +6, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 171, 87,,, +6, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 171, 87,,, +6, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 171, 87,,, +6, 4, 2, 4, 2, 2, 1, 1, 128, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 1, 1, 256, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 1, 1, 384, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 1, 2, 128, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 1, 2, 256, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 1, 2, 384, 240, 240, 
171, 79,,, +6, 4, 2, 4, 2, 2, 2, 1, 128, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 2, 1, 256, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 2, 1, 384, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 2, 2, 128, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 2, 2, 256, 240, 240, 171, 79,,, +6, 4, 2, 4, 2, 2, 2, 2, 384, 240, 240, 171, 79,,, +6, 4, 2, 4, 3, 3, 1, 1, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 1, 1, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 1, 1, 384, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 1, 2, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 1, 2, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 1, 2, 384, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 1, 3, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 1, 3, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 1, 3, 384, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 1, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 1, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 1, 384, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 2, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 2, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 2, 384, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 3, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 3, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 2, 3, 384, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 1, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 1, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 1, 384, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 2, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 2, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 2, 384, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 3, 128, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 3, 256, 240, 240, 171, 75,,, +6, 4, 2, 4, 3, 3, 3, 3, 384, 240, 240, 171, 75,,, +6, 4, 3, 1, 1, 1, 1, 1, 128, 67, 67, 45, 24,,, +6, 4, 3, 1, 1, 1, 1, 1, 256, 67, 67, 45, 24,,, +6, 4, 3, 1, 1, 1, 1, 1, 384, 67, 67, 45, 24,,, +6, 4, 3, 1, 2, 2, 1, 1, 128, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 1, 1, 256, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 1, 1, 384, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 
2, 1, 2, 128, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 1, 2, 256, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 1, 2, 384, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 2, 1, 128, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 2, 1, 256, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 2, 1, 384, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 2, 2, 128, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 2, 2, 256, 67, 66, 45, 22,,, +6, 4, 3, 1, 2, 2, 2, 2, 384, 67, 66, 45, 22,,, +6, 4, 3, 1, 3, 3, 1, 1, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 1, 1, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 1, 1, 384, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 1, 2, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 1, 2, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 1, 2, 384, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 1, 3, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 1, 3, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 1, 3, 384, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 1, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 1, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 1, 384, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 2, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 2, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 2, 384, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 3, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 3, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 2, 3, 384, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 1, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 1, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 1, 384, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 2, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 2, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 2, 384, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 3, 128, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 3, 256, 67, 64, 45, 21,,, +6, 4, 3, 1, 3, 3, 3, 3, 384, 67, 64, 45, 21,,, +6, 4, 3, 2, 1, 1, 1, 1, 128, 131, 131, 87, 45,,, +6, 4, 3, 2, 1, 1, 1, 1, 256, 131, 131, 87, 45,,, +6, 4, 3, 2, 1, 1, 1, 1, 384, 131, 131, 87, 45,,, +6, 4, 3, 2, 2, 2, 1, 1, 128, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 1, 1, 256, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 1, 1, 384, 131, 
129, 87, 41,,, +6, 4, 3, 2, 2, 2, 1, 2, 128, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 1, 2, 256, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 1, 2, 384, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 2, 1, 128, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 2, 1, 256, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 2, 1, 384, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 2, 2, 128, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 2, 2, 256, 131, 129, 87, 41,,, +6, 4, 3, 2, 2, 2, 2, 2, 384, 131, 129, 87, 41,,, +6, 4, 3, 2, 3, 3, 1, 1, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 1, 1, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 1, 1, 384, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 1, 2, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 1, 2, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 1, 2, 384, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 1, 3, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 1, 3, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 1, 3, 384, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 1, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 1, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 1, 384, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 2, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 2, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 2, 384, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 3, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 3, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 2, 3, 384, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 1, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 1, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 1, 384, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 2, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 2, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 2, 384, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 3, 128, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 3, 256, 131, 125, 87, 39,,, +6, 4, 3, 2, 3, 3, 3, 3, 384, 131, 125, 87, 39,,, +6, 4, 3, 3, 1, 1, 1, 1, 128, 195, 195, 129, 66,,, +6, 4, 3, 3, 1, 1, 1, 1, 256, 195, 195, 129, 66,,, +6, 4, 3, 3, 1, 1, 1, 1, 384, 195, 195, 129, 66,,, +6, 4, 3, 3, 2, 2, 1, 1, 128, 
195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 1, 1, 256, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 1, 1, 384, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 1, 2, 128, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 1, 2, 256, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 1, 2, 384, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 2, 1, 128, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 2, 1, 256, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 2, 1, 384, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 2, 2, 128, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 2, 2, 256, 195, 192, 129, 60,,, +6, 4, 3, 3, 2, 2, 2, 2, 384, 195, 192, 129, 60,,, +6, 4, 3, 3, 3, 3, 1, 1, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 1, 1, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 1, 1, 384, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 1, 2, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 1, 2, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 1, 2, 384, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 1, 3, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 1, 3, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 1, 3, 384, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 1, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 1, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 1, 384, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 2, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 2, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 2, 384, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 3, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 3, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 2, 3, 384, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 1, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 1, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 1, 384, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 2, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 2, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 2, 384, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 3, 128, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 3, 256, 195, 186, 129, 57,,, +6, 4, 3, 3, 3, 3, 3, 3, 384, 195, 186, 129, 57,,, +6, 4, 3, 4, 1, 1, 1, 1, 128, 240, 240, 
171, 87,,, +6, 4, 3, 4, 1, 1, 1, 1, 256, 240, 240, 171, 87,,, +6, 4, 3, 4, 1, 1, 1, 1, 384, 240, 240, 171, 87,,, +6, 4, 3, 4, 2, 2, 1, 1, 128, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 1, 1, 256, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 1, 1, 384, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 1, 2, 128, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 1, 2, 256, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 1, 2, 384, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 2, 1, 128, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 2, 1, 256, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 2, 1, 384, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 2, 2, 128, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 2, 2, 256, 240, 240, 171, 79,,, +6, 4, 3, 4, 2, 2, 2, 2, 384, 240, 240, 171, 79,,, +6, 4, 3, 4, 3, 3, 1, 1, 128, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 1, 1, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 1, 1, 384, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 1, 2, 128, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 1, 2, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 1, 2, 384, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 1, 3, 128, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 1, 3, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 1, 3, 384, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 1, 128, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 1, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 1, 384, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 2, 128, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 2, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 2, 384, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 3, 128, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 3, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 2, 3, 384, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 3, 1, 128, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 3, 1, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 3, 1, 384, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 3, 2, 128, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 3, 2, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 3, 2, 384, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 3, 3, 128, 240, 240, 171, 75,,, 
+6, 4, 3, 4, 3, 3, 3, 3, 256, 240, 240, 171, 75,,, +6, 4, 3, 4, 3, 3, 3, 3, 384, 240, 240, 171, 75,,, +6, 4, 4, 1, 1, 1, 1, 1, 128, 67, 67, 45, 24,,, +6, 4, 4, 1, 1, 1, 1, 1, 256, 67, 67, 45, 24,,, +6, 4, 4, 1, 1, 1, 1, 1, 384, 67, 67, 45, 24,,, +6, 4, 4, 1, 2, 2, 1, 1, 128, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 1, 1, 256, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 1, 1, 384, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 1, 2, 128, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 1, 2, 256, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 1, 2, 384, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 2, 1, 128, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 2, 1, 256, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 2, 1, 384, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 2, 2, 128, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 2, 2, 256, 67, 66, 45, 22,,, +6, 4, 4, 1, 2, 2, 2, 2, 384, 67, 66, 45, 22,,, +6, 4, 4, 1, 3, 3, 1, 1, 128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 1, 1, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 1, 1, 384, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 1, 2, 128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 1, 2, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 1, 2, 384, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 1, 3, 128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 1, 3, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 1, 3, 384, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 1, 128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 1, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 1, 384, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 2, 128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 2, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 2, 384, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 3, 128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 3, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 2, 3, 384, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 1, 128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 1, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 1, 384, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 2, 128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 2, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 2, 384, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 3, 
128, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 3, 256, 67, 64, 45, 21,,, +6, 4, 4, 1, 3, 3, 3, 3, 384, 67, 64, 45, 21,,, +6, 4, 4, 2, 1, 1, 1, 1, 128, 131, 131, 87, 45,,, +6, 4, 4, 2, 1, 1, 1, 1, 256, 131, 131, 87, 45,,, +6, 4, 4, 2, 1, 1, 1, 1, 384, 131, 131, 87, 45,,, +6, 4, 4, 2, 2, 2, 1, 1, 128, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 1, 1, 256, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 1, 1, 384, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 1, 2, 128, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 1, 2, 256, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 1, 2, 384, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 2, 1, 128, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 2, 1, 256, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 2, 1, 384, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 2, 2, 128, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 2, 2, 256, 131, 129, 87, 41,,, +6, 4, 4, 2, 2, 2, 2, 2, 384, 131, 129, 87, 41,,, +6, 4, 4, 2, 3, 3, 1, 1, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 1, 1, 256, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 1, 1, 384, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 1, 2, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 1, 2, 256, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 1, 2, 384, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 1, 3, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 1, 3, 256, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 1, 3, 384, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 1, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 1, 256, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 1, 384, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 2, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 2, 256, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 2, 384, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 3, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 3, 256, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 2, 3, 384, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 1, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 1, 256, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 1, 384, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 2, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 2, 256, 
131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 2, 384, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 3, 128, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 3, 256, 131, 125, 87, 39,,, +6, 4, 4, 2, 3, 3, 3, 3, 384, 131, 125, 87, 39,,, +6, 4, 4, 3, 1, 1, 1, 1, 128, 195, 195, 129, 66,,, +6, 4, 4, 3, 1, 1, 1, 1, 256, 195, 195, 129, 66,,, +6, 4, 4, 3, 1, 1, 1, 1, 384, 195, 195, 129, 66,,, +6, 4, 4, 3, 2, 2, 1, 1, 128, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 1, 1, 256, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 1, 1, 384, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 1, 2, 128, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 1, 2, 256, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 1, 2, 384, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 2, 1, 128, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 2, 1, 256, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 2, 1, 384, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 2, 2, 128, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 2, 2, 256, 195, 192, 129, 60,,, +6, 4, 4, 3, 2, 2, 2, 2, 384, 195, 192, 129, 60,,, +6, 4, 4, 3, 3, 3, 1, 1, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 1, 1, 256, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 1, 1, 384, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 1, 2, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 1, 2, 256, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 1, 2, 384, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 1, 3, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 1, 3, 256, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 1, 3, 384, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 1, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 1, 256, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 1, 384, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 2, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 2, 256, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 2, 384, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 3, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 3, 256, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 2, 3, 384, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 3, 1, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 3, 1, 256, 195, 186, 129, 
57,,, +6, 4, 4, 3, 3, 3, 3, 1, 384, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 3, 2, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 3, 2, 256, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 3, 2, 384, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 3, 3, 128, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 3, 3, 256, 195, 186, 129, 57,,, +6, 4, 4, 3, 3, 3, 3, 3, 384, 195, 186, 129, 57,,, +6, 4, 4, 4, 1, 1, 1, 1, 128, 240, 240, 171, 87,,, +6, 4, 4, 4, 1, 1, 1, 1, 256, 240, 240, 171, 87,,, +6, 4, 4, 4, 1, 1, 1, 1, 384, 240, 240, 171, 87,,, +6, 4, 4, 4, 2, 2, 1, 1, 128, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 1, 1, 256, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 1, 1, 384, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 1, 2, 128, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 1, 2, 256, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 1, 2, 384, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 2, 1, 128, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 2, 1, 256, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 2, 1, 384, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 2, 2, 128, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 2, 2, 256, 240, 240, 171, 79,,, +6, 4, 4, 4, 2, 2, 2, 2, 384, 240, 240, 171, 79,,, +6, 4, 4, 4, 3, 3, 1, 1, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 1, 1, 256, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 1, 1, 384, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 1, 2, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 1, 2, 256, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 1, 2, 384, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 1, 3, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 1, 3, 256, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 1, 3, 384, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 2, 1, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 2, 1, 256, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 2, 1, 384, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 2, 2, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 2, 2, 256, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 2, 2, 384, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 2, 3, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 2, 3, 256, 240, 240, 171, 75,,, +6, 
4, 4, 4, 3, 3, 2, 3, 384, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 1, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 1, 256, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 1, 384, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 2, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 2, 256, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 2, 384, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 3, 128, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 3, 256, 240, 240, 171, 75,,, +6, 4, 4, 4, 3, 3, 3, 3, 384, 240, 240, 171, 75,,, +6, 4, 5, 1, 1, 1, 1, 1, 128, 67, 67, 45, 24,,, +6, 4, 5, 1, 1, 1, 1, 1, 256, 67, 67, 45, 24,,, +6, 4, 5, 1, 1, 1, 1, 1, 384, 67, 67, 45, 24,,, +6, 4, 5, 1, 2, 2, 1, 1, 128, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 1, 1, 256, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 1, 1, 384, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 1, 2, 128, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 1, 2, 256, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 1, 2, 384, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 2, 1, 128, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 2, 1, 256, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 2, 1, 384, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 2, 2, 128, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 2, 2, 256, 67, 66, 45, 22,,, +6, 4, 5, 1, 2, 2, 2, 2, 384, 67, 66, 45, 22,,, +6, 4, 5, 1, 3, 3, 1, 1, 128, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 1, 1, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 1, 1, 384, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 1, 2, 128, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 1, 2, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 1, 2, 384, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 1, 3, 128, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 1, 3, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 1, 3, 384, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 2, 1, 128, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 2, 1, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 2, 1, 384, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 2, 2, 128, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 2, 2, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 2, 2, 384, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 2, 3, 128, 67, 64, 45, 21,,, +6, 
4, 5, 1, 3, 3, 2, 3, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 2, 3, 384, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 1, 128, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 1, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 1, 384, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 2, 128, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 2, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 2, 384, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 3, 128, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 3, 256, 67, 64, 45, 21,,, +6, 4, 5, 1, 3, 3, 3, 3, 384, 67, 64, 45, 21,,, +6, 4, 5, 2, 1, 1, 1, 1, 128, 131, 131, 87, 45,,, +6, 4, 5, 2, 1, 1, 1, 1, 256, 131, 131, 87, 45,,, +6, 4, 5, 2, 1, 1, 1, 1, 384, 131, 131, 87, 45,,, +6, 4, 5, 2, 2, 2, 1, 1, 128, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 1, 1, 256, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 1, 1, 384, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 1, 2, 128, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 1, 2, 256, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 1, 2, 384, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 2, 1, 128, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 2, 1, 256, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 2, 1, 384, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 2, 2, 128, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 2, 2, 256, 131, 129, 87, 41,,, +6, 4, 5, 2, 2, 2, 2, 2, 384, 131, 129, 87, 41,,, +6, 4, 5, 2, 3, 3, 1, 1, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 1, 1, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 1, 1, 384, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 1, 2, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 1, 2, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 1, 2, 384, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 1, 3, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 1, 3, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 1, 3, 384, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 1, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 1, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 1, 384, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 2, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 2, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 2, 
384, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 3, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 3, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 2, 3, 384, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 1, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 1, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 1, 384, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 2, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 2, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 2, 384, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 3, 128, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 3, 256, 131, 125, 87, 39,,, +6, 4, 5, 2, 3, 3, 3, 3, 384, 131, 125, 87, 39,,, +6, 4, 5, 3, 1, 1, 1, 1, 128, 195, 195, 129, 66,,, +6, 4, 5, 3, 1, 1, 1, 1, 256, 195, 195, 129, 66,,, +6, 4, 5, 3, 1, 1, 1, 1, 384, 195, 195, 129, 66,,, +6, 4, 5, 3, 2, 2, 1, 1, 128, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 1, 1, 256, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 1, 1, 384, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 1, 2, 128, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 1, 2, 256, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 1, 2, 384, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 2, 1, 128, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 2, 1, 256, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 2, 1, 384, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 2, 2, 128, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 2, 2, 256, 195, 192, 129, 60,,, +6, 4, 5, 3, 2, 2, 2, 2, 384, 195, 192, 129, 60,,, +6, 4, 5, 3, 3, 3, 1, 1, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 1, 1, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 1, 1, 384, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 1, 2, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 1, 2, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 1, 2, 384, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 1, 3, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 1, 3, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 1, 3, 384, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 2, 1, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 2, 1, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 2, 1, 384, 195, 186, 129, 
57,,, +6, 4, 5, 3, 3, 3, 2, 2, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 2, 2, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 2, 2, 384, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 2, 3, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 2, 3, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 2, 3, 384, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 1, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 1, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 1, 384, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 2, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 2, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 2, 384, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 3, 128, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 3, 256, 195, 186, 129, 57,,, +6, 4, 5, 3, 3, 3, 3, 3, 384, 195, 186, 129, 57,,, +6, 4, 5, 4, 1, 1, 1, 1, 128, 240, 240, 171, 87,,, +6, 4, 5, 4, 1, 1, 1, 1, 256, 240, 240, 171, 87,,, +6, 4, 5, 4, 1, 1, 1, 1, 384, 240, 240, 171, 87,,, +6, 4, 5, 4, 2, 2, 1, 1, 128, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 1, 1, 256, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 1, 1, 384, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 1, 2, 128, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 1, 2, 256, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 1, 2, 384, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 2, 1, 128, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 2, 1, 256, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 2, 1, 384, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 2, 2, 128, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 2, 2, 256, 240, 240, 171, 79,,, +6, 4, 5, 4, 2, 2, 2, 2, 384, 240, 240, 171, 79,,, +6, 4, 5, 4, 3, 3, 1, 1, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 1, 1, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 1, 1, 384, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 1, 2, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 1, 2, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 1, 2, 384, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 1, 3, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 1, 3, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 1, 3, 384, 240, 240, 171, 75,,, +6, 
4, 5, 4, 3, 3, 2, 1, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 2, 1, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 2, 1, 384, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 2, 2, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 2, 2, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 2, 2, 384, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 2, 3, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 2, 3, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 2, 3, 384, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 1, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 1, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 1, 384, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 2, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 2, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 2, 384, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 3, 128, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 3, 256, 240, 240, 171, 75,,, +6, 4, 5, 4, 3, 3, 3, 3, 384, 240, 240, 171, 75,,, +6, 4, 6, 1, 1, 1, 1, 1, 128, 67, 67, 45, 24,,, +6, 4, 6, 1, 1, 1, 1, 1, 256, 67, 67, 45, 24,,, +6, 4, 6, 1, 1, 1, 1, 1, 384, 67, 67, 45, 24,,, +6, 4, 6, 1, 2, 2, 1, 1, 128, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 1, 1, 256, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 1, 1, 384, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 1, 2, 128, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 1, 2, 256, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 1, 2, 384, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 2, 1, 128, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 2, 1, 256, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 2, 1, 384, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 2, 2, 128, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 2, 2, 256, 67, 66, 45, 22,,, +6, 4, 6, 1, 2, 2, 2, 2, 384, 67, 66, 45, 22,,, +6, 4, 6, 1, 3, 3, 1, 1, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 1, 1, 256, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 1, 1, 384, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 1, 2, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 1, 2, 256, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 1, 2, 384, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 1, 3, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 1, 3, 256, 
67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 1, 3, 384, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 1, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 1, 256, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 1, 384, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 2, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 2, 256, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 2, 384, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 3, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 3, 256, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 2, 3, 384, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 1, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 1, 256, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 1, 384, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 2, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 2, 256, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 2, 384, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 3, 128, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 3, 256, 67, 64, 45, 21,,, +6, 4, 6, 1, 3, 3, 3, 3, 384, 67, 64, 45, 21,,, +6, 4, 6, 2, 1, 1, 1, 1, 128, 131, 131, 87, 45,,, +6, 4, 6, 2, 1, 1, 1, 1, 256, 131, 131, 87, 45,,, +6, 4, 6, 2, 1, 1, 1, 1, 384, 131, 131, 87, 45,,, +6, 4, 6, 2, 2, 2, 1, 1, 128, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 1, 1, 256, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 1, 1, 384, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 1, 2, 128, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 1, 2, 256, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 1, 2, 384, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 2, 1, 128, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 2, 1, 256, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 2, 1, 384, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 2, 2, 128, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 2, 2, 256, 131, 129, 87, 41,,, +6, 4, 6, 2, 2, 2, 2, 2, 384, 131, 129, 87, 41,,, +6, 4, 6, 2, 3, 3, 1, 1, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 1, 1, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 1, 1, 384, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 1, 2, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 1, 2, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 1, 2, 384, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 
1, 3, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 1, 3, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 1, 3, 384, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 1, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 1, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 1, 384, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 2, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 2, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 2, 384, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 3, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 3, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 2, 3, 384, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 1, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 1, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 1, 384, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 2, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 2, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 2, 384, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 3, 128, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 3, 256, 131, 125, 87, 39,,, +6, 4, 6, 2, 3, 3, 3, 3, 384, 131, 125, 87, 39,,, +6, 4, 6, 3, 1, 1, 1, 1, 128, 195, 195, 129, 66,,, +6, 4, 6, 3, 1, 1, 1, 1, 256, 195, 195, 129, 66,,, +6, 4, 6, 3, 1, 1, 1, 1, 384, 195, 195, 129, 66,,, +6, 4, 6, 3, 2, 2, 1, 1, 128, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 1, 1, 256, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 1, 1, 384, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 1, 2, 128, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 1, 2, 256, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 1, 2, 384, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 2, 1, 128, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 2, 1, 256, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 2, 1, 384, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 2, 2, 128, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 2, 2, 256, 195, 192, 129, 60,,, +6, 4, 6, 3, 2, 2, 2, 2, 384, 195, 192, 129, 60,,, +6, 4, 6, 3, 3, 3, 1, 1, 128, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 1, 1, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 1, 1, 384, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 1, 2, 128, 195, 186, 129, 57,,, 
+6, 4, 6, 3, 3, 3, 1, 2, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 1, 2, 384, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 1, 3, 128, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 1, 3, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 1, 3, 384, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 1, 128, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 1, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 1, 384, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 2, 128, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 2, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 2, 384, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 3, 128, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 3, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 2, 3, 384, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 1, 128, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 1, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 1, 384, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 2, 128, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 2, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 2, 384, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 3, 128, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 3, 256, 195, 186, 129, 57,,, +6, 4, 6, 3, 3, 3, 3, 3, 384, 195, 186, 129, 57,,, +6, 4, 6, 4, 1, 1, 1, 1, 128, 240, 240, 171, 87,,, +6, 4, 6, 4, 1, 1, 1, 1, 256, 240, 240, 171, 87,,, +6, 4, 6, 4, 1, 1, 1, 1, 384, 240, 240, 171, 87,,, +6, 4, 6, 4, 2, 2, 1, 1, 128, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 1, 1, 256, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 1, 1, 384, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 1, 2, 128, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 1, 2, 256, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 1, 2, 384, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 2, 1, 128, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 2, 1, 256, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 2, 1, 384, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 2, 2, 128, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 2, 2, 256, 240, 240, 171, 79,,, +6, 4, 6, 4, 2, 2, 2, 2, 384, 240, 240, 171, 79,,, +6, 4, 6, 4, 3, 3, 1, 1, 128, 240, 240, 171, 75,,, +6, 4, 6, 
4, 3, 3, 1, 1, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 1, 1, 384, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 1, 2, 128, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 1, 2, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 1, 2, 384, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 1, 3, 128, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 1, 3, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 1, 3, 384, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 1, 128, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 1, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 1, 384, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 2, 128, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 2, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 2, 384, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 3, 128, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 3, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 2, 3, 384, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 1, 128, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 1, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 1, 384, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 2, 128, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 2, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 2, 384, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 3, 128, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 3, 256, 240, 240, 171, 75,,, +6, 4, 6, 4, 3, 3, 3, 3, 384, 240, 240, 171, 75,,, +6, 5, 1, 1, 1, 1, 1, 1, 128, 68, 68, 46,,,, +6, 5, 1, 1, 1, 1, 1, 1, 256, 68, 68, 46,,,, +6, 5, 1, 1, 1, 1, 1, 1, 384, 68, 68, 46,,,, +6, 5, 1, 1, 2, 2, 1, 1, 128, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 1, 1, 256, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 1, 1, 384, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 1, 2, 128, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 1, 2, 256, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 1, 2, 384, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 2, 1, 128, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 2, 1, 256, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 2, 1, 384, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 2, 2, 128, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 2, 2, 256, 68, 64, 45,,,, +6, 5, 1, 1, 2, 2, 2, 2, 384, 68, 64, 45,,,, +6, 5, 
1, 1, 3, 3, 1, 1, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 1, 1, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 1, 1, 384, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 1, 2, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 1, 2, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 1, 2, 384, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 1, 3, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 1, 3, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 1, 3, 384, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 1, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 1, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 1, 384, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 2, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 2, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 2, 384, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 3, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 3, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 2, 3, 384, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 1, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 1, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 1, 384, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 2, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 2, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 2, 384, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 3, 128, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 3, 256, 68, 61, 44,,,, +6, 5, 1, 1, 3, 3, 3, 3, 384, 68, 61, 44,,,, +6, 5, 1, 2, 1, 1, 1, 1, 128, 132, 132, 88,,,, +6, 5, 1, 2, 1, 1, 1, 1, 256, 132, 132, 88,,,, +6, 5, 1, 2, 1, 1, 1, 1, 384, 132, 132, 88,,,, +6, 5, 1, 2, 2, 2, 1, 1, 128, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 1, 1, 256, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 1, 1, 384, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 1, 2, 128, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 1, 2, 256, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 1, 2, 384, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 2, 1, 128, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 2, 1, 256, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 2, 1, 384, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 2, 2, 128, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 2, 2, 256, 132, 124, 86,,,, +6, 5, 1, 2, 2, 2, 2, 2, 384, 132, 124, 86,,,, +6, 5, 1, 2, 3, 3, 1, 1, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 1, 1, 256, 132, 118, 
84,,,, +6, 5, 1, 2, 3, 3, 1, 1, 384, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 1, 2, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 1, 2, 256, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 1, 2, 384, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 1, 3, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 1, 3, 256, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 1, 3, 384, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 1, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 1, 256, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 1, 384, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 2, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 2, 256, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 2, 384, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 3, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 3, 256, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 2, 3, 384, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 1, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 1, 256, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 1, 384, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 2, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 2, 256, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 2, 384, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 3, 128, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 3, 256, 132, 118, 84,,,, +6, 5, 1, 2, 3, 3, 3, 3, 384, 132, 118, 84,,,, +6, 5, 1, 3, 1, 1, 1, 1, 128, 196, 196, 130,,,, +6, 5, 1, 3, 1, 1, 1, 1, 256, 196, 196, 130,,,, +6, 5, 1, 3, 1, 1, 1, 1, 384, 196, 196, 130,,,, +6, 5, 1, 3, 2, 2, 1, 1, 128, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 1, 1, 256, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 1, 1, 384, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 1, 2, 128, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 1, 2, 256, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 1, 2, 384, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 2, 1, 128, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 2, 1, 256, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 2, 1, 384, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 2, 2, 128, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 2, 2, 256, 196, 184, 127,,,, +6, 5, 1, 3, 2, 2, 2, 2, 384, 196, 184, 127,,,, +6, 5, 1, 3, 3, 3, 1, 1, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 1, 1, 256, 196, 175, 124,,,, 
+6, 5, 1, 3, 3, 3, 1, 1, 384, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 1, 2, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 1, 2, 256, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 1, 2, 384, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 1, 3, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 1, 3, 256, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 1, 3, 384, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 1, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 1, 256, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 1, 384, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 2, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 2, 256, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 2, 384, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 3, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 3, 256, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 2, 3, 384, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 1, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 1, 256, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 1, 384, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 2, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 2, 256, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 2, 384, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 3, 128, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 3, 256, 196, 175, 124,,,, +6, 5, 1, 3, 3, 3, 3, 3, 384, 196, 175, 124,,,, +6, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 172,,,, +6, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 172,,,, +6, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 172,,,, +6, 5, 1, 4, 2, 2, 1, 1, 128, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 1, 1, 256, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 1, 1, 384, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 1, 2, 128, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 1, 2, 256, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 1, 2, 384, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 2, 1, 128, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 2, 1, 256, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 2, 1, 384, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 2, 2, 128, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 2, 2, 256, 240, 240, 168,,,, +6, 5, 1, 4, 2, 2, 2, 2, 384, 240, 240, 168,,,, +6, 5, 1, 4, 3, 3, 1, 1, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 1, 1, 256, 
240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 1, 1, 384, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 1, 2, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 1, 2, 256, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 1, 2, 384, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 1, 3, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 1, 3, 256, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 1, 3, 384, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 1, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 1, 256, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 1, 384, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 2, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 2, 256, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 2, 384, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 3, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 3, 256, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 2, 3, 384, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 1, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 1, 256, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 1, 384, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 2, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 2, 256, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 2, 384, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 3, 128, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 3, 256, 240, 232, 164,,,, +6, 5, 1, 4, 3, 3, 3, 3, 384, 240, 232, 164,,,, +6, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 214,,,, +6, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 214,,,, +6, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 214,,,, +6, 5, 1, 5, 2, 2, 1, 1, 128, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 1, 1, 256, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 1, 1, 384, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 1, 2, 128, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 1, 2, 256, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 1, 2, 384, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 2, 1, 128, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 2, 1, 256, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 2, 1, 384, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 2, 2, 128, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 2, 2, 256, 240, 240, 209,,,, +6, 5, 1, 5, 2, 2, 2, 2, 384, 240, 240, 209,,,, +6, 5, 1, 5, 3, 3, 1, 1, 128, 240, 240, 204,,,, +6, 5, 1, 5, 
3, 3, 1, 1, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 1, 1, 384, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 1, 2, 128, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 1, 2, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 1, 2, 384, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 1, 3, 128, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 1, 3, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 1, 3, 384, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 1, 128, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 1, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 1, 384, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 2, 128, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 2, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 2, 384, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 3, 128, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 3, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 2, 3, 384, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 1, 128, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 1, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 1, 384, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 2, 128, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 2, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 2, 384, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 3, 128, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 3, 256, 240, 240, 204,,,, +6, 5, 1, 5, 3, 3, 3, 3, 384, 240, 240, 204,,,, +6, 5, 2, 1, 1, 1, 1, 1, 128, 68, 68, 46,,,, +6, 5, 2, 1, 1, 1, 1, 1, 256, 68, 68, 46,,,, +6, 5, 2, 1, 1, 1, 1, 1, 384, 68, 68, 46,,,, +6, 5, 2, 1, 2, 2, 1, 1, 128, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 1, 1, 256, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 1, 1, 384, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 1, 2, 128, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 1, 2, 256, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 1, 2, 384, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 2, 1, 128, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 2, 1, 256, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 2, 1, 384, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 2, 2, 128, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 2, 2, 256, 68, 64, 45,,,, +6, 5, 2, 1, 2, 2, 2, 2, 384, 68, 64, 45,,,, +6, 5, 2, 1, 3, 3, 1, 1, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 1, 1, 256, 68, 61, 44,,,, 
+6, 5, 2, 1, 3, 3, 1, 1, 384, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 1, 2, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 1, 2, 256, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 1, 2, 384, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 1, 3, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 1, 3, 256, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 1, 3, 384, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 1, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 1, 256, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 1, 384, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 2, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 2, 256, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 2, 384, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 3, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 3, 256, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 2, 3, 384, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 1, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 1, 256, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 1, 384, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 2, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 2, 256, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 2, 384, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 3, 128, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 3, 256, 68, 61, 44,,,, +6, 5, 2, 1, 3, 3, 3, 3, 384, 68, 61, 44,,,, +6, 5, 2, 2, 1, 1, 1, 1, 128, 132, 132, 88,,,, +6, 5, 2, 2, 1, 1, 1, 1, 256, 132, 132, 88,,,, +6, 5, 2, 2, 1, 1, 1, 1, 384, 132, 132, 88,,,, +6, 5, 2, 2, 2, 2, 1, 1, 128, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 1, 1, 256, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 1, 1, 384, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 1, 2, 128, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 1, 2, 256, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 1, 2, 384, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 2, 1, 128, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 2, 1, 256, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 2, 1, 384, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 2, 2, 128, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 2, 2, 256, 132, 124, 86,,,, +6, 5, 2, 2, 2, 2, 2, 2, 384, 132, 124, 86,,,, +6, 5, 2, 2, 3, 3, 1, 1, 128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 1, 1, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 1, 1, 384, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 1, 2, 
128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 1, 2, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 1, 2, 384, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 1, 3, 128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 1, 3, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 1, 3, 384, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 1, 128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 1, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 1, 384, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 2, 128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 2, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 2, 384, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 3, 128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 3, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 2, 3, 384, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 1, 128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 1, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 1, 384, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 2, 128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 2, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 2, 384, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 3, 128, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 3, 256, 132, 118, 84,,,, +6, 5, 2, 2, 3, 3, 3, 3, 384, 132, 118, 84,,,, +6, 5, 2, 3, 1, 1, 1, 1, 128, 196, 196, 130,,,, +6, 5, 2, 3, 1, 1, 1, 1, 256, 196, 196, 130,,,, +6, 5, 2, 3, 1, 1, 1, 1, 384, 196, 196, 130,,,, +6, 5, 2, 3, 2, 2, 1, 1, 128, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 1, 1, 256, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 1, 1, 384, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 1, 2, 128, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 1, 2, 256, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 1, 2, 384, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 2, 1, 128, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 2, 1, 256, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 2, 1, 384, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 2, 2, 128, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 2, 2, 256, 196, 184, 127,,,, +6, 5, 2, 3, 2, 2, 2, 2, 384, 196, 184, 127,,,, +6, 5, 2, 3, 3, 3, 1, 1, 128, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 1, 1, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 1, 1, 384, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 1, 2, 128, 
196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 1, 2, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 1, 2, 384, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 1, 3, 128, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 1, 3, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 1, 3, 384, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 1, 128, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 1, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 1, 384, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 2, 128, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 2, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 2, 384, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 3, 128, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 3, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 2, 3, 384, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 1, 128, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 1, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 1, 384, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 2, 128, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 2, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 2, 384, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 3, 128, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 3, 256, 196, 175, 124,,,, +6, 5, 2, 3, 3, 3, 3, 3, 384, 196, 175, 124,,,, +6, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 172,,,, +6, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 172,,,, +6, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 172,,,, +6, 5, 2, 4, 2, 2, 1, 1, 128, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 1, 1, 256, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 1, 1, 384, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 1, 2, 128, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 1, 2, 256, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 1, 2, 384, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 2, 1, 128, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 2, 1, 256, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 2, 1, 384, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 2, 2, 128, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 2, 2, 256, 240, 240, 168,,,, +6, 5, 2, 4, 2, 2, 2, 2, 384, 240, 240, 168,,,, +6, 5, 2, 4, 3, 3, 1, 1, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 1, 1, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 1, 1, 384, 240, 232, 164,,,, +6, 5, 2, 4, 
3, 3, 1, 2, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 1, 2, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 1, 2, 384, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 1, 3, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 1, 3, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 1, 3, 384, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 1, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 1, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 1, 384, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 2, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 2, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 2, 384, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 3, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 3, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 2, 3, 384, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 1, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 1, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 1, 384, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 2, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 2, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 2, 384, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 3, 128, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 3, 256, 240, 232, 164,,,, +6, 5, 2, 4, 3, 3, 3, 3, 384, 240, 232, 164,,,, +6, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 214,,,, +6, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 214,,,, +6, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 214,,,, +6, 5, 2, 5, 2, 2, 1, 1, 128, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 1, 1, 256, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 1, 1, 384, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 1, 2, 128, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 1, 2, 256, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 1, 2, 384, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 2, 1, 128, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 2, 1, 256, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 2, 1, 384, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 2, 2, 128, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 2, 2, 256, 240, 240, 209,,,, +6, 5, 2, 5, 2, 2, 2, 2, 384, 240, 240, 209,,,, +6, 5, 2, 5, 3, 3, 1, 1, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 1, 1, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 1, 1, 384, 240, 240, 
204,,,, +6, 5, 2, 5, 3, 3, 1, 2, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 1, 2, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 1, 2, 384, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 1, 3, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 1, 3, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 1, 3, 384, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 1, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 1, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 1, 384, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 2, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 2, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 2, 384, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 3, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 3, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 2, 3, 384, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 1, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 1, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 1, 384, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 2, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 2, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 2, 384, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 3, 128, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 3, 256, 240, 240, 204,,,, +6, 5, 2, 5, 3, 3, 3, 3, 384, 240, 240, 204,,,, +6, 5, 3, 1, 1, 1, 1, 1, 128, 68, 68, 46,,,, +6, 5, 3, 1, 1, 1, 1, 1, 256, 68, 68, 46,,,, +6, 5, 3, 1, 1, 1, 1, 1, 384, 68, 68, 46,,,, +6, 5, 3, 1, 2, 2, 1, 1, 128, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 1, 1, 256, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 1, 1, 384, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 1, 2, 128, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 1, 2, 256, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 1, 2, 384, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 2, 1, 128, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 2, 1, 256, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 2, 1, 384, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 2, 2, 128, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 2, 2, 256, 68, 64, 45,,,, +6, 5, 3, 1, 2, 2, 2, 2, 384, 68, 64, 45,,,, +6, 5, 3, 1, 3, 3, 1, 1, 128, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 1, 1, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 1, 1, 384, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 1, 2, 128, 
68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 1, 2, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 1, 2, 384, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 1, 3, 128, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 1, 3, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 1, 3, 384, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 1, 128, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 1, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 1, 384, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 2, 128, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 2, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 2, 384, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 3, 128, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 3, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 2, 3, 384, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 1, 128, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 1, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 1, 384, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 2, 128, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 2, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 2, 384, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 3, 128, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 3, 256, 68, 61, 44,,,, +6, 5, 3, 1, 3, 3, 3, 3, 384, 68, 61, 44,,,, +6, 5, 3, 2, 1, 1, 1, 1, 128, 132, 132, 88,,,, +6, 5, 3, 2, 1, 1, 1, 1, 256, 132, 132, 88,,,, +6, 5, 3, 2, 1, 1, 1, 1, 384, 132, 132, 88,,,, +6, 5, 3, 2, 2, 2, 1, 1, 128, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 1, 1, 256, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 1, 1, 384, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 1, 2, 128, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 1, 2, 256, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 1, 2, 384, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 2, 1, 128, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 2, 1, 256, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 2, 1, 384, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 2, 2, 128, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 2, 2, 256, 132, 124, 86,,,, +6, 5, 3, 2, 2, 2, 2, 2, 384, 132, 124, 86,,,, +6, 5, 3, 2, 3, 3, 1, 1, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 1, 1, 256, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 1, 1, 384, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 1, 2, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 1, 2, 256, 132, 118, 84,,,, +6, 5, 3, 
2, 3, 3, 1, 2, 384, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 1, 3, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 1, 3, 256, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 1, 3, 384, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 1, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 1, 256, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 1, 384, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 2, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 2, 256, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 2, 384, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 3, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 3, 256, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 2, 3, 384, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 1, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 1, 256, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 1, 384, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 2, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 2, 256, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 2, 384, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 3, 128, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 3, 256, 132, 118, 84,,,, +6, 5, 3, 2, 3, 3, 3, 3, 384, 132, 118, 84,,,, +6, 5, 3, 3, 1, 1, 1, 1, 128, 196, 196, 130,,,, +6, 5, 3, 3, 1, 1, 1, 1, 256, 196, 196, 130,,,, +6, 5, 3, 3, 1, 1, 1, 1, 384, 196, 196, 130,,,, +6, 5, 3, 3, 2, 2, 1, 1, 128, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 1, 1, 256, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 1, 1, 384, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 1, 2, 128, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 1, 2, 256, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 1, 2, 384, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 2, 1, 128, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 2, 1, 256, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 2, 1, 384, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 2, 2, 128, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 2, 2, 256, 196, 184, 127,,,, +6, 5, 3, 3, 2, 2, 2, 2, 384, 196, 184, 127,,,, +6, 5, 3, 3, 3, 3, 1, 1, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 1, 1, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 1, 1, 384, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 1, 2, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 1, 2, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 
3, 1, 2, 384, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 1, 3, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 1, 3, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 1, 3, 384, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 1, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 1, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 1, 384, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 2, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 2, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 2, 384, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 3, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 3, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 2, 3, 384, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 1, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 1, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 1, 384, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 2, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 2, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 2, 384, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 3, 128, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 3, 256, 196, 175, 124,,,, +6, 5, 3, 3, 3, 3, 3, 3, 384, 196, 175, 124,,,, +6, 5, 3, 4, 1, 1, 1, 1, 128, 240, 240, 172,,,, +6, 5, 3, 4, 1, 1, 1, 1, 256, 240, 240, 172,,,, +6, 5, 3, 4, 1, 1, 1, 1, 384, 240, 240, 172,,,, +6, 5, 3, 4, 2, 2, 1, 1, 128, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 1, 1, 256, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 1, 1, 384, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 1, 2, 128, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 1, 2, 256, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 1, 2, 384, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 2, 1, 128, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 2, 1, 256, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 2, 1, 384, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 2, 2, 128, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 2, 2, 256, 240, 240, 168,,,, +6, 5, 3, 4, 2, 2, 2, 2, 384, 240, 240, 168,,,, +6, 5, 3, 4, 3, 3, 1, 1, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 1, 1, 256, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 1, 1, 384, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 1, 2, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 1, 2, 256, 240, 232, 164,,,, 
+6, 5, 3, 4, 3, 3, 1, 2, 384, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 1, 3, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 1, 3, 256, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 1, 3, 384, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 1, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 1, 256, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 1, 384, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 2, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 2, 256, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 2, 384, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 3, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 3, 256, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 2, 3, 384, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 1, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 1, 256, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 1, 384, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 2, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 2, 256, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 2, 384, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 3, 128, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 3, 256, 240, 232, 164,,,, +6, 5, 3, 4, 3, 3, 3, 3, 384, 240, 232, 164,,,, +6, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 214,,,, +6, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 214,,,, +6, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 214,,,, +6, 5, 3, 5, 2, 2, 1, 1, 128, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 1, 1, 256, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 1, 1, 384, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 1, 2, 128, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 1, 2, 256, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 1, 2, 384, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 2, 1, 128, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 2, 1, 256, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 2, 1, 384, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 2, 2, 128, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 2, 2, 256, 240, 240, 209,,,, +6, 5, 3, 5, 2, 2, 2, 2, 384, 240, 240, 209,,,, +6, 5, 3, 5, 3, 3, 1, 1, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 1, 1, 256, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 1, 1, 384, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 1, 2, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 1, 2, 256, 
240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 1, 2, 384, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 1, 3, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 1, 3, 256, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 1, 3, 384, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 1, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 1, 256, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 1, 384, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 2, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 2, 256, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 2, 384, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 3, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 3, 256, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 2, 3, 384, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 1, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 1, 256, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 1, 384, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 2, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 2, 256, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 2, 384, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 3, 128, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 3, 256, 240, 240, 204,,,, +6, 5, 3, 5, 3, 3, 3, 3, 384, 240, 240, 204,,,, +6, 5, 4, 1, 1, 1, 1, 1, 128, 68, 68, 46,,,, +6, 5, 4, 1, 1, 1, 1, 1, 256, 68, 68, 46,,,, +6, 5, 4, 1, 1, 1, 1, 1, 384, 68, 68, 46,,,, +6, 5, 4, 1, 2, 2, 1, 1, 128, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 1, 1, 256, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 1, 1, 384, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 1, 2, 128, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 1, 2, 256, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 1, 2, 384, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 2, 1, 128, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 2, 1, 256, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 2, 1, 384, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 2, 2, 128, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 2, 2, 256, 68, 64, 45,,,, +6, 5, 4, 1, 2, 2, 2, 2, 384, 68, 64, 45,,,, +6, 5, 4, 1, 3, 3, 1, 1, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 1, 1, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 1, 1, 384, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 1, 2, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 1, 2, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 1, 2, 
384, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 1, 3, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 1, 3, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 1, 3, 384, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 1, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 1, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 1, 384, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 2, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 2, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 2, 384, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 3, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 3, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 2, 3, 384, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 1, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 1, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 1, 384, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 2, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 2, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 2, 384, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 3, 128, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 3, 256, 68, 61, 44,,,, +6, 5, 4, 1, 3, 3, 3, 3, 384, 68, 61, 44,,,, +6, 5, 4, 2, 1, 1, 1, 1, 128, 132, 132, 88,,,, +6, 5, 4, 2, 1, 1, 1, 1, 256, 132, 132, 88,,,, +6, 5, 4, 2, 1, 1, 1, 1, 384, 132, 132, 88,,,, +6, 5, 4, 2, 2, 2, 1, 1, 128, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 1, 1, 256, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 1, 1, 384, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 1, 2, 128, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 1, 2, 256, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 1, 2, 384, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 2, 1, 128, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 2, 1, 256, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 2, 1, 384, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 2, 2, 128, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 2, 2, 256, 132, 124, 86,,,, +6, 5, 4, 2, 2, 2, 2, 2, 384, 132, 124, 86,,,, +6, 5, 4, 2, 3, 3, 1, 1, 128, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 1, 1, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 1, 1, 384, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 1, 2, 128, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 1, 2, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 1, 2, 384, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 1, 3, 128, 132, 118, 84,,,, 
+6, 5, 4, 2, 3, 3, 1, 3, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 1, 3, 384, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 1, 128, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 1, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 1, 384, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 2, 128, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 2, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 2, 384, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 3, 128, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 3, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 2, 3, 384, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 1, 128, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 1, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 1, 384, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 2, 128, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 2, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 2, 384, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 3, 128, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 3, 256, 132, 118, 84,,,, +6, 5, 4, 2, 3, 3, 3, 3, 384, 132, 118, 84,,,, +6, 5, 4, 3, 1, 1, 1, 1, 128, 196, 196, 130,,,, +6, 5, 4, 3, 1, 1, 1, 1, 256, 196, 196, 130,,,, +6, 5, 4, 3, 1, 1, 1, 1, 384, 196, 196, 130,,,, +6, 5, 4, 3, 2, 2, 1, 1, 128, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 1, 1, 256, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 1, 1, 384, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 1, 2, 128, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 1, 2, 256, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 1, 2, 384, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 2, 1, 128, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 2, 1, 256, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 2, 1, 384, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 2, 2, 128, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 2, 2, 256, 196, 184, 127,,,, +6, 5, 4, 3, 2, 2, 2, 2, 384, 196, 184, 127,,,, +6, 5, 4, 3, 3, 3, 1, 1, 128, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 1, 1, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 1, 1, 384, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 1, 2, 128, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 1, 2, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 1, 2, 384, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 1, 3, 128, 196, 175, 124,,,, +6, 
5, 4, 3, 3, 3, 1, 3, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 1, 3, 384, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 1, 128, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 1, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 1, 384, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 2, 128, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 2, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 2, 384, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 3, 128, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 3, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 2, 3, 384, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 1, 128, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 1, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 1, 384, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 2, 128, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 2, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 2, 384, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 3, 128, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 3, 256, 196, 175, 124,,,, +6, 5, 4, 3, 3, 3, 3, 3, 384, 196, 175, 124,,,, +6, 5, 4, 4, 1, 1, 1, 1, 128, 240, 240, 172,,,, +6, 5, 4, 4, 1, 1, 1, 1, 256, 240, 240, 172,,,, +6, 5, 4, 4, 1, 1, 1, 1, 384, 240, 240, 172,,,, +6, 5, 4, 4, 2, 2, 1, 1, 128, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 1, 1, 256, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 1, 1, 384, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 1, 2, 128, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 1, 2, 256, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 1, 2, 384, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 2, 1, 128, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 2, 1, 256, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 2, 1, 384, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 2, 2, 128, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 2, 2, 256, 240, 240, 168,,,, +6, 5, 4, 4, 2, 2, 2, 2, 384, 240, 240, 168,,,, +6, 5, 4, 4, 3, 3, 1, 1, 128, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 1, 1, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 1, 1, 384, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 1, 2, 128, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 1, 2, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 1, 2, 384, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 1, 3, 128, 240, 
232, 164,,,, +6, 5, 4, 4, 3, 3, 1, 3, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 1, 3, 384, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 1, 128, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 1, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 1, 384, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 2, 128, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 2, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 2, 384, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 3, 128, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 3, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 2, 3, 384, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 1, 128, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 1, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 1, 384, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 2, 128, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 2, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 2, 384, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 3, 128, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 3, 256, 240, 232, 164,,,, +6, 5, 4, 4, 3, 3, 3, 3, 384, 240, 232, 164,,,, +6, 5, 4, 5, 1, 1, 1, 1, 128, 240, 240, 214,,,, +6, 5, 4, 5, 1, 1, 1, 1, 256, 240, 240, 214,,,, +6, 5, 4, 5, 1, 1, 1, 1, 384, 240, 240, 214,,,, +6, 5, 4, 5, 2, 2, 1, 1, 128, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 1, 1, 256, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 1, 1, 384, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 1, 2, 128, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 1, 2, 256, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 1, 2, 384, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 2, 1, 128, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 2, 1, 256, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 2, 1, 384, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 2, 2, 128, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 2, 2, 256, 240, 240, 209,,,, +6, 5, 4, 5, 2, 2, 2, 2, 384, 240, 240, 209,,,, +6, 5, 4, 5, 3, 3, 1, 1, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 1, 1, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 1, 1, 384, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 1, 2, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 1, 2, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 1, 2, 384, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 
1, 3, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 1, 3, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 1, 3, 384, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 1, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 1, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 1, 384, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 2, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 2, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 2, 384, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 3, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 3, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 2, 3, 384, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 1, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 1, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 1, 384, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 2, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 2, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 2, 384, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 3, 128, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 3, 256, 240, 240, 204,,,, +6, 5, 4, 5, 3, 3, 3, 3, 384, 240, 240, 204,,,, +6, 5, 5, 1, 1, 1, 1, 1, 128, 68, 68, 46,,,, +6, 5, 5, 1, 1, 1, 1, 1, 256, 68, 68, 46,,,, +6, 5, 5, 1, 1, 1, 1, 1, 384, 68, 68, 46,,,, +6, 5, 5, 1, 2, 2, 1, 1, 128, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 1, 1, 256, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 1, 1, 384, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 1, 2, 128, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 1, 2, 256, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 1, 2, 384, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 2, 1, 128, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 2, 1, 256, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 2, 1, 384, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 2, 2, 128, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 2, 2, 256, 68, 64, 45,,,, +6, 5, 5, 1, 2, 2, 2, 2, 384, 68, 64, 45,,,, +6, 5, 5, 1, 3, 3, 1, 1, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 1, 1, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 1, 1, 384, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 1, 2, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 1, 2, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 1, 2, 384, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 1, 3, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 
1, 3, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 1, 3, 384, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 1, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 1, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 1, 384, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 2, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 2, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 2, 384, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 3, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 3, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 2, 3, 384, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 1, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 1, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 1, 384, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 2, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 2, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 2, 384, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 3, 128, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 3, 256, 68, 61, 44,,,, +6, 5, 5, 1, 3, 3, 3, 3, 384, 68, 61, 44,,,, +6, 5, 5, 2, 1, 1, 1, 1, 128, 132, 132, 88,,,, +6, 5, 5, 2, 1, 1, 1, 1, 256, 132, 132, 88,,,, +6, 5, 5, 2, 1, 1, 1, 1, 384, 132, 132, 88,,,, +6, 5, 5, 2, 2, 2, 1, 1, 128, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 1, 1, 256, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 1, 1, 384, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 1, 2, 128, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 1, 2, 256, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 1, 2, 384, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 2, 1, 128, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 2, 1, 256, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 2, 1, 384, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 2, 2, 128, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 2, 2, 256, 132, 124, 86,,,, +6, 5, 5, 2, 2, 2, 2, 2, 384, 132, 124, 86,,,, +6, 5, 5, 2, 3, 3, 1, 1, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 1, 1, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 1, 1, 384, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 1, 2, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 1, 2, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 1, 2, 384, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 1, 3, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 1, 3, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 1, 3, 384, 132, 
118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 1, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 1, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 1, 384, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 2, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 2, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 2, 384, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 3, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 3, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 2, 3, 384, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 1, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 1, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 1, 384, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 2, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 2, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 2, 384, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 3, 128, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 3, 256, 132, 118, 84,,,, +6, 5, 5, 2, 3, 3, 3, 3, 384, 132, 118, 84,,,, +6, 5, 5, 3, 1, 1, 1, 1, 128, 196, 196, 130,,,, +6, 5, 5, 3, 1, 1, 1, 1, 256, 196, 196, 130,,,, +6, 5, 5, 3, 1, 1, 1, 1, 384, 196, 196, 130,,,, +6, 5, 5, 3, 2, 2, 1, 1, 128, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 1, 1, 256, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 1, 1, 384, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 1, 2, 128, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 1, 2, 256, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 1, 2, 384, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 2, 1, 128, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 2, 1, 256, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 2, 1, 384, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 2, 2, 128, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 2, 2, 256, 196, 184, 127,,,, +6, 5, 5, 3, 2, 2, 2, 2, 384, 196, 184, 127,,,, +6, 5, 5, 3, 3, 3, 1, 1, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 1, 1, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 1, 1, 384, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 1, 2, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 1, 2, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 1, 2, 384, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 1, 3, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 1, 3, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 1, 3, 384, 196, 
175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 1, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 1, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 1, 384, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 2, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 2, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 2, 384, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 3, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 3, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 2, 3, 384, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 1, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 1, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 1, 384, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 2, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 2, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 2, 384, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 3, 128, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 3, 256, 196, 175, 124,,,, +6, 5, 5, 3, 3, 3, 3, 3, 384, 196, 175, 124,,,, +6, 5, 5, 4, 1, 1, 1, 1, 128, 240, 240, 172,,,, +6, 5, 5, 4, 1, 1, 1, 1, 256, 240, 240, 172,,,, +6, 5, 5, 4, 1, 1, 1, 1, 384, 240, 240, 172,,,, +6, 5, 5, 4, 2, 2, 1, 1, 128, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 1, 1, 256, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 1, 1, 384, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 1, 2, 128, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 1, 2, 256, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 1, 2, 384, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 2, 1, 128, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 2, 1, 256, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 2, 1, 384, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 2, 2, 128, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 2, 2, 256, 240, 240, 168,,,, +6, 5, 5, 4, 2, 2, 2, 2, 384, 240, 240, 168,,,, +6, 5, 5, 4, 3, 3, 1, 1, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 1, 1, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 1, 1, 384, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 1, 2, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 1, 2, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 1, 2, 384, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 1, 3, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 1, 3, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 
1, 3, 384, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 1, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 1, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 1, 384, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 2, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 2, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 2, 384, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 3, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 3, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 2, 3, 384, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 1, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 1, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 1, 384, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 2, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 2, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 2, 384, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 3, 128, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 3, 256, 240, 232, 164,,,, +6, 5, 5, 4, 3, 3, 3, 3, 384, 240, 232, 164,,,, +6, 5, 5, 5, 1, 1, 1, 1, 128, 240, 240, 214,,,, +6, 5, 5, 5, 1, 1, 1, 1, 256, 240, 240, 214,,,, +6, 5, 5, 5, 1, 1, 1, 1, 384, 240, 240, 214,,,, +6, 5, 5, 5, 2, 2, 1, 1, 128, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 1, 1, 256, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 1, 1, 384, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 1, 2, 128, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 1, 2, 256, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 1, 2, 384, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 2, 1, 128, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 2, 1, 256, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 2, 1, 384, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 2, 2, 128, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 2, 2, 256, 240, 240, 209,,,, +6, 5, 5, 5, 2, 2, 2, 2, 384, 240, 240, 209,,,, +6, 5, 5, 5, 3, 3, 1, 1, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 1, 1, 256, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 1, 1, 384, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 1, 2, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 1, 2, 256, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 1, 2, 384, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 1, 3, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 1, 3, 256, 240, 240, 204,,,, 
+6, 5, 5, 5, 3, 3, 1, 3, 384, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 1, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 1, 256, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 1, 384, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 2, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 2, 256, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 2, 384, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 3, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 3, 256, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 2, 3, 384, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 1, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 1, 256, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 1, 384, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 2, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 2, 256, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 2, 384, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 3, 128, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 3, 256, 240, 240, 204,,,, +6, 5, 5, 5, 3, 3, 3, 3, 384, 240, 240, 204,,,, +6, 5, 6, 1, 1, 1, 1, 1, 128, 68, 68, 46,,,, +6, 5, 6, 1, 1, 1, 1, 1, 256, 68, 68, 46,,,, +6, 5, 6, 1, 1, 1, 1, 1, 384, 68, 68, 46,,,, +6, 5, 6, 1, 2, 2, 1, 1, 128, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 1, 1, 256, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 1, 1, 384, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 1, 2, 128, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 1, 2, 256, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 1, 2, 384, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 2, 1, 128, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 2, 1, 256, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 2, 1, 384, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 2, 2, 128, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 2, 2, 256, 68, 64, 45,,,, +6, 5, 6, 1, 2, 2, 2, 2, 384, 68, 64, 45,,,, +6, 5, 6, 1, 3, 3, 1, 1, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 1, 1, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 1, 1, 384, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 1, 2, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 1, 2, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 1, 2, 384, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 1, 3, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 1, 3, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 1, 3, 384, 68, 61, 44,,,, +6, 5, 
6, 1, 3, 3, 2, 1, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 2, 1, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 2, 1, 384, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 2, 2, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 2, 2, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 2, 2, 384, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 2, 3, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 2, 3, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 2, 3, 384, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 1, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 1, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 1, 384, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 2, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 2, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 2, 384, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 3, 128, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 3, 256, 68, 61, 44,,,, +6, 5, 6, 1, 3, 3, 3, 3, 384, 68, 61, 44,,,, +6, 5, 6, 2, 1, 1, 1, 1, 128, 132, 132, 88,,,, +6, 5, 6, 2, 1, 1, 1, 1, 256, 132, 132, 88,,,, +6, 5, 6, 2, 1, 1, 1, 1, 384, 132, 132, 88,,,, +6, 5, 6, 2, 2, 2, 1, 1, 128, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 1, 1, 256, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 1, 1, 384, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 1, 2, 128, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 1, 2, 256, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 1, 2, 384, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 2, 1, 128, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 2, 1, 256, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 2, 1, 384, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 2, 2, 128, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 2, 2, 256, 132, 124, 86,,,, +6, 5, 6, 2, 2, 2, 2, 2, 384, 132, 124, 86,,,, +6, 5, 6, 2, 3, 3, 1, 1, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 1, 1, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 1, 1, 384, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 1, 2, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 1, 2, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 1, 2, 384, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 1, 3, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 1, 3, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 1, 3, 384, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 1, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 
1, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 1, 384, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 2, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 2, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 2, 384, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 3, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 3, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 2, 3, 384, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 1, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 1, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 1, 384, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 2, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 2, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 2, 384, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 3, 128, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 3, 256, 132, 118, 84,,,, +6, 5, 6, 2, 3, 3, 3, 3, 384, 132, 118, 84,,,, +6, 5, 6, 3, 1, 1, 1, 1, 128, 196, 196, 130,,,, +6, 5, 6, 3, 1, 1, 1, 1, 256, 196, 196, 130,,,, +6, 5, 6, 3, 1, 1, 1, 1, 384, 196, 196, 130,,,, +6, 5, 6, 3, 2, 2, 1, 1, 128, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 1, 1, 256, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 1, 1, 384, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 1, 2, 128, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 1, 2, 256, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 1, 2, 384, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 2, 1, 128, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 2, 1, 256, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 2, 1, 384, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 2, 2, 128, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 2, 2, 256, 196, 184, 127,,,, +6, 5, 6, 3, 2, 2, 2, 2, 384, 196, 184, 127,,,, +6, 5, 6, 3, 3, 3, 1, 1, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 1, 1, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 1, 1, 384, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 1, 2, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 1, 2, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 1, 2, 384, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 1, 3, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 1, 3, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 1, 3, 384, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 1, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 
1, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 1, 384, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 2, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 2, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 2, 384, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 3, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 3, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 2, 3, 384, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 1, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 1, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 1, 384, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 2, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 2, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 2, 384, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 3, 128, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 3, 256, 196, 175, 124,,,, +6, 5, 6, 3, 3, 3, 3, 3, 384, 196, 175, 124,,,, +6, 5, 6, 4, 1, 1, 1, 1, 128, 240, 240, 172,,,, +6, 5, 6, 4, 1, 1, 1, 1, 256, 240, 240, 172,,,, +6, 5, 6, 4, 1, 1, 1, 1, 384, 240, 240, 172,,,, +6, 5, 6, 4, 2, 2, 1, 1, 128, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 1, 1, 256, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 1, 1, 384, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 1, 2, 128, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 1, 2, 256, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 1, 2, 384, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 2, 1, 128, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 2, 1, 256, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 2, 1, 384, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 2, 2, 128, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 2, 2, 256, 240, 240, 168,,,, +6, 5, 6, 4, 2, 2, 2, 2, 384, 240, 240, 168,,,, +6, 5, 6, 4, 3, 3, 1, 1, 128, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 1, 1, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 1, 1, 384, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 1, 2, 128, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 1, 2, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 1, 2, 384, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 1, 3, 128, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 1, 3, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 1, 3, 384, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 2, 1, 128, 240, 232, 164,,,, +6, 
5, 6, 4, 3, 3, 2, 1, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 2, 1, 384, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 2, 2, 128, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 2, 2, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 2, 2, 384, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 2, 3, 128, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 2, 3, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 2, 3, 384, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 1, 128, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 1, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 1, 384, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 2, 128, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 2, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 2, 384, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 3, 128, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 3, 256, 240, 232, 164,,,, +6, 5, 6, 4, 3, 3, 3, 3, 384, 240, 232, 164,,,, +6, 5, 6, 5, 1, 1, 1, 1, 128, 240, 240, 214,,,, +6, 5, 6, 5, 1, 1, 1, 1, 256, 240, 240, 214,,,, +6, 5, 6, 5, 1, 1, 1, 1, 384, 240, 240, 214,,,, +6, 5, 6, 5, 2, 2, 1, 1, 128, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 1, 1, 256, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 1, 1, 384, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 1, 2, 128, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 1, 2, 256, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 1, 2, 384, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 2, 1, 128, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 2, 1, 256, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 2, 1, 384, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 2, 2, 128, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 2, 2, 256, 240, 240, 209,,,, +6, 5, 6, 5, 2, 2, 2, 2, 384, 240, 240, 209,,,, +6, 5, 6, 5, 3, 3, 1, 1, 128, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 1, 1, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 1, 1, 384, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 1, 2, 128, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 1, 2, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 1, 2, 384, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 1, 3, 128, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 1, 3, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 1, 3, 384, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 1, 128, 240, 
240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 1, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 1, 384, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 2, 128, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 2, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 2, 384, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 3, 128, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 3, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 2, 3, 384, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 1, 128, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 1, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 1, 384, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 2, 128, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 2, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 2, 384, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 3, 128, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 3, 256, 240, 240, 204,,,, +6, 5, 6, 5, 3, 3, 3, 3, 384, 240, 240, 204,,,, +6, 6, 1, 1, 1, 1, 1, 1, 128, 69, 47, 26,,,, +6, 6, 1, 1, 1, 1, 1, 1, 256, 69, 47, 26,,,, +6, 6, 1, 1, 1, 1, 1, 1, 384, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 1, 1, 128, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 1, 1, 256, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 1, 1, 384, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 1, 2, 128, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 1, 2, 256, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 1, 2, 384, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 2, 1, 128, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 2, 1, 256, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 2, 1, 384, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 2, 2, 128, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 2, 2, 256, 69, 47, 26,,,, +6, 6, 1, 1, 2, 2, 2, 2, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 1, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 1, 256, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 1, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 2, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 2, 256, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 2, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 3, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 3, 256, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 1, 3, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 2, 1, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 2, 1, 256, 69, 47, 26,,,, 
+6, 6, 1, 1, 3, 3, 2, 1, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 2, 2, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 2, 2, 256, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 2, 2, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 2, 3, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 2, 3, 256, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 2, 3, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 1, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 1, 256, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 1, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 2, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 2, 256, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 2, 384, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 3, 128, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 3, 256, 69, 47, 26,,,, +6, 6, 1, 1, 3, 3, 3, 3, 384, 69, 47, 26,,,, +6, 6, 1, 2, 1, 1, 1, 1, 128, 133, 89, 47,,,, +6, 6, 1, 2, 1, 1, 1, 1, 256, 133, 89, 47,,,, +6, 6, 1, 2, 1, 1, 1, 1, 384, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 1, 1, 128, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 1, 1, 256, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 1, 1, 384, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 1, 2, 128, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 1, 2, 256, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 1, 2, 384, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 2, 1, 128, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 2, 1, 256, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 2, 1, 384, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 2, 2, 128, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 2, 2, 256, 133, 89, 47,,,, +6, 6, 1, 2, 2, 2, 2, 2, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 1, 128, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 1, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 1, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 2, 128, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 2, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 2, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 3, 128, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 3, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 1, 3, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 1, 128, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 1, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 1, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 2, 128, 133, 
89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 2, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 2, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 3, 128, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 3, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 2, 3, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 1, 128, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 1, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 1, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 2, 128, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 2, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 2, 384, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 3, 128, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 3, 256, 133, 89, 47,,,, +6, 6, 1, 2, 3, 3, 3, 3, 384, 133, 89, 47,,,, +6, 6, 1, 3, 1, 1, 1, 1, 128, 197, 131, 68,,,, +6, 6, 1, 3, 1, 1, 1, 1, 256, 197, 131, 68,,,, +6, 6, 1, 3, 1, 1, 1, 1, 384, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 1, 1, 128, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 1, 1, 256, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 1, 1, 384, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 1, 2, 128, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 1, 2, 256, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 1, 2, 384, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 2, 1, 128, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 2, 1, 256, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 2, 1, 384, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 2, 2, 128, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 2, 2, 256, 197, 131, 68,,,, +6, 6, 1, 3, 2, 2, 2, 2, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 1, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 1, 256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 1, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 2, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 2, 256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 2, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 3, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 3, 256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 1, 3, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 1, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 1, 256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 1, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 2, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 2, 
256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 2, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 3, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 3, 256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 2, 3, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 1, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 1, 256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 1, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 2, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 2, 256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 2, 384, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 3, 128, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 3, 256, 197, 131, 68,,,, +6, 6, 1, 3, 3, 3, 3, 3, 384, 197, 131, 68,,,, +6, 6, 1, 4, 1, 1, 1, 1, 128, 240, 173, 89,,,, +6, 6, 1, 4, 1, 1, 1, 1, 256, 240, 173, 89,,,, +6, 6, 1, 4, 1, 1, 1, 1, 384, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 1, 1, 128, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 1, 1, 256, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 1, 1, 384, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 1, 2, 128, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 1, 2, 256, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 1, 2, 384, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 2, 1, 128, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 2, 1, 256, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 2, 1, 384, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 2, 2, 128, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 2, 2, 256, 240, 173, 89,,,, +6, 6, 1, 4, 2, 2, 2, 2, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 1, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 1, 256, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 1, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 2, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 2, 256, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 2, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 3, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 3, 256, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 1, 3, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 2, 1, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 2, 1, 256, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 2, 1, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 2, 2, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 2, 2, 256, 240, 173, 89,,,, +6, 
6, 1, 4, 3, 3, 2, 2, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 2, 3, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 2, 3, 256, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 2, 3, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 1, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 1, 256, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 1, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 2, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 2, 256, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 2, 384, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 3, 128, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 3, 256, 240, 173, 89,,,, +6, 6, 1, 4, 3, 3, 3, 3, 384, 240, 173, 89,,,, +6, 6, 1, 5, 1, 1, 1, 1, 128, 240, 215, 110,,,, +6, 6, 1, 5, 1, 1, 1, 1, 256, 240, 215, 110,,,, +6, 6, 1, 5, 1, 1, 1, 1, 384, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 1, 1, 128, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 1, 1, 256, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 1, 1, 384, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 1, 2, 128, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 1, 2, 256, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 1, 2, 384, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 2, 1, 128, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 2, 1, 256, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 2, 1, 384, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 2, 2, 128, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 2, 2, 256, 240, 215, 110,,,, +6, 6, 1, 5, 2, 2, 2, 2, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 1, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 1, 256, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 1, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 2, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 2, 256, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 2, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 3, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 3, 256, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 1, 3, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 2, 1, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 2, 1, 256, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 2, 1, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 2, 2, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 2, 2, 256, 240, 215, 110,,,, 
+6, 6, 1, 5, 3, 3, 2, 2, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 2, 3, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 2, 3, 256, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 2, 3, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 1, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 1, 256, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 1, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 2, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 2, 256, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 2, 384, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 3, 128, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 3, 256, 240, 215, 110,,,, +6, 6, 1, 5, 3, 3, 3, 3, 384, 240, 215, 110,,,, +6, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 131,,,, +6, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 131,,,, +6, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 1, 1, 128, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 1, 1, 256, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 1, 1, 384, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 1, 2, 128, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 1, 2, 256, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 1, 2, 384, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 2, 1, 128, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 2, 1, 256, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 2, 1, 384, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 2, 2, 128, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 2, 2, 256, 240, 240, 131,,,, +6, 6, 1, 6, 2, 2, 2, 2, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 1, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 1, 256, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 1, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 2, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 2, 256, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 2, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 3, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 3, 256, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 1, 3, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 1, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 1, 256, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 1, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 2, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 2, 256, 
240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 2, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 3, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 3, 256, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 2, 3, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 1, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 1, 256, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 1, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 2, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 2, 256, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 2, 384, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 3, 128, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 3, 256, 240, 240, 131,,,, +6, 6, 1, 6, 3, 3, 3, 3, 384, 240, 240, 131,,,, +6, 6, 2, 1, 1, 1, 1, 1, 128, 69, 47, 26,,,, +6, 6, 2, 1, 1, 1, 1, 1, 256, 69, 47, 26,,,, +6, 6, 2, 1, 1, 1, 1, 1, 384, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 1, 1, 128, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 1, 1, 256, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 1, 1, 384, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 1, 2, 128, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 1, 2, 256, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 1, 2, 384, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 2, 1, 128, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 2, 1, 256, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 2, 1, 384, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 2, 2, 128, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 2, 2, 256, 69, 47, 26,,,, +6, 6, 2, 1, 2, 2, 2, 2, 384, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 1, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 1, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 1, 384, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 2, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 2, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 2, 384, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 3, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 3, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 1, 3, 384, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 2, 1, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 2, 1, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 2, 1, 384, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 2, 2, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 2, 2, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 2, 2, 384, 69, 47, 26,,,, +6, 6, 
2, 1, 3, 3, 2, 3, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 2, 3, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 2, 3, 384, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 1, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 1, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 1, 384, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 2, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 2, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 2, 384, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 3, 128, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 3, 256, 69, 47, 26,,,, +6, 6, 2, 1, 3, 3, 3, 3, 384, 69, 47, 26,,,, +6, 6, 2, 2, 1, 1, 1, 1, 128, 133, 89, 47,,,, +6, 6, 2, 2, 1, 1, 1, 1, 256, 133, 89, 47,,,, +6, 6, 2, 2, 1, 1, 1, 1, 384, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 1, 1, 128, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 1, 1, 256, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 1, 1, 384, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 1, 2, 128, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 1, 2, 256, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 1, 2, 384, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 2, 1, 128, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 2, 1, 256, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 2, 1, 384, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 2, 2, 128, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 2, 2, 256, 133, 89, 47,,,, +6, 6, 2, 2, 2, 2, 2, 2, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 1, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 1, 256, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 1, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 2, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 2, 256, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 2, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 3, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 3, 256, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 1, 3, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 2, 1, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 2, 1, 256, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 2, 1, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 2, 2, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 2, 2, 256, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 2, 2, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 2, 3, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 2, 3, 256, 133, 89, 
47,,,, +6, 6, 2, 2, 3, 3, 2, 3, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 1, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 1, 256, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 1, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 2, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 2, 256, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 2, 384, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 3, 128, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 3, 256, 133, 89, 47,,,, +6, 6, 2, 2, 3, 3, 3, 3, 384, 133, 89, 47,,,, +6, 6, 2, 3, 1, 1, 1, 1, 128, 197, 131, 68,,,, +6, 6, 2, 3, 1, 1, 1, 1, 256, 197, 131, 68,,,, +6, 6, 2, 3, 1, 1, 1, 1, 384, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 1, 1, 128, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 1, 1, 256, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 1, 1, 384, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 1, 2, 128, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 1, 2, 256, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 1, 2, 384, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 2, 1, 128, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 2, 1, 256, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 2, 1, 384, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 2, 2, 128, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 2, 2, 256, 197, 131, 68,,,, +6, 6, 2, 3, 2, 2, 2, 2, 384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 1, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 1, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 1, 384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 2, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 2, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 2, 384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 3, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 3, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 1, 3, 384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 1, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 1, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 1, 384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 2, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 2, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 2, 384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 3, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 3, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 2, 3, 
384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 1, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 1, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 1, 384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 2, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 2, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 2, 384, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 3, 128, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 3, 256, 197, 131, 68,,,, +6, 6, 2, 3, 3, 3, 3, 3, 384, 197, 131, 68,,,, +6, 6, 2, 4, 1, 1, 1, 1, 128, 240, 173, 89,,,, +6, 6, 2, 4, 1, 1, 1, 1, 256, 240, 173, 89,,,, +6, 6, 2, 4, 1, 1, 1, 1, 384, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 1, 1, 128, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 1, 1, 256, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 1, 1, 384, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 1, 2, 128, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 1, 2, 256, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 1, 2, 384, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 2, 1, 128, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 2, 1, 256, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 2, 1, 384, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 2, 2, 128, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 2, 2, 256, 240, 173, 89,,,, +6, 6, 2, 4, 2, 2, 2, 2, 384, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 1, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 1, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 1, 384, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 2, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 2, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 2, 384, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 3, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 3, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 1, 3, 384, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 1, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 1, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 1, 384, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 2, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 2, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 2, 384, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 3, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 3, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 2, 3, 384, 240, 173, 89,,,, +6, 
6, 2, 4, 3, 3, 3, 1, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 3, 1, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 3, 1, 384, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 3, 2, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 3, 2, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 3, 2, 384, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 3, 3, 128, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 3, 3, 256, 240, 173, 89,,,, +6, 6, 2, 4, 3, 3, 3, 3, 384, 240, 173, 89,,,, +6, 6, 2, 5, 1, 1, 1, 1, 128, 240, 215, 110,,,, +6, 6, 2, 5, 1, 1, 1, 1, 256, 240, 215, 110,,,, +6, 6, 2, 5, 1, 1, 1, 1, 384, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 1, 1, 128, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 1, 1, 256, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 1, 1, 384, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 1, 2, 128, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 1, 2, 256, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 1, 2, 384, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 2, 1, 128, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 2, 1, 256, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 2, 1, 384, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 2, 2, 128, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 2, 2, 256, 240, 215, 110,,,, +6, 6, 2, 5, 2, 2, 2, 2, 384, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 1, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 1, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 1, 384, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 2, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 2, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 2, 384, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 3, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 3, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 1, 3, 384, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 1, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 1, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 1, 384, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 2, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 2, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 2, 384, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 3, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 3, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 2, 3, 384, 240, 215, 
110,,,, +6, 6, 2, 5, 3, 3, 3, 1, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 3, 1, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 3, 1, 384, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 3, 2, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 3, 2, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 3, 2, 384, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 3, 3, 128, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 3, 3, 256, 240, 215, 110,,,, +6, 6, 2, 5, 3, 3, 3, 3, 384, 240, 215, 110,,,, +6, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 131,,,, +6, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 131,,,, +6, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 1, 1, 128, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 1, 1, 256, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 1, 1, 384, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 1, 2, 128, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 1, 2, 256, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 1, 2, 384, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 2, 1, 128, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 2, 1, 256, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 2, 1, 384, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 2, 2, 128, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 2, 2, 256, 240, 240, 131,,,, +6, 6, 2, 6, 2, 2, 2, 2, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 1, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 1, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 1, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 2, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 2, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 2, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 3, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 3, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 1, 3, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 1, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 1, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 1, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 2, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 2, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 2, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 3, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 3, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 2, 
3, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 1, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 1, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 1, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 2, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 2, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 2, 384, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 3, 128, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 3, 256, 240, 240, 131,,,, +6, 6, 2, 6, 3, 3, 3, 3, 384, 240, 240, 131,,,, +6, 6, 3, 1, 1, 1, 1, 1, 128, 69, 47, 26,,,, +6, 6, 3, 1, 1, 1, 1, 1, 256, 69, 47, 26,,,, +6, 6, 3, 1, 1, 1, 1, 1, 384, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 1, 1, 128, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 1, 1, 256, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 1, 1, 384, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 1, 2, 128, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 1, 2, 256, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 1, 2, 384, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 2, 1, 128, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 2, 1, 256, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 2, 1, 384, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 2, 2, 128, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 2, 2, 256, 69, 47, 26,,,, +6, 6, 3, 1, 2, 2, 2, 2, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 1, 128, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 1, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 1, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 2, 128, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 2, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 2, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 3, 128, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 3, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 1, 3, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 1, 128, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 1, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 1, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 2, 128, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 2, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 2, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 3, 128, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 3, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 2, 3, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 3, 1, 128, 69, 47, 26,,,, +6, 6, 3, 
1, 3, 3, 3, 1, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 3, 1, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 3, 2, 128, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 3, 2, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 3, 2, 384, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 3, 3, 128, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 3, 3, 256, 69, 47, 26,,,, +6, 6, 3, 1, 3, 3, 3, 3, 384, 69, 47, 26,,,, +6, 6, 3, 2, 1, 1, 1, 1, 128, 133, 89, 47,,,, +6, 6, 3, 2, 1, 1, 1, 1, 256, 133, 89, 47,,,, +6, 6, 3, 2, 1, 1, 1, 1, 384, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 1, 1, 128, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 1, 1, 256, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 1, 1, 384, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 1, 2, 128, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 1, 2, 256, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 1, 2, 384, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 2, 1, 128, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 2, 1, 256, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 2, 1, 384, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 2, 2, 128, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 2, 2, 256, 133, 89, 47,,,, +6, 6, 3, 2, 2, 2, 2, 2, 384, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 1, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 1, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 1, 384, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 2, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 2, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 2, 384, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 3, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 3, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 1, 3, 384, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 1, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 1, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 1, 384, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 2, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 2, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 2, 384, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 3, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 3, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 2, 3, 384, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 3, 1, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 3, 1, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 3, 1, 384, 133, 89, 
47,,,, +6, 6, 3, 2, 3, 3, 3, 2, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 3, 2, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 3, 2, 384, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 3, 3, 128, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 3, 3, 256, 133, 89, 47,,,, +6, 6, 3, 2, 3, 3, 3, 3, 384, 133, 89, 47,,,, +6, 6, 3, 3, 1, 1, 1, 1, 128, 197, 131, 68,,,, +6, 6, 3, 3, 1, 1, 1, 1, 256, 197, 131, 68,,,, +6, 6, 3, 3, 1, 1, 1, 1, 384, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 1, 1, 128, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 1, 1, 256, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 1, 1, 384, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 1, 2, 128, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 1, 2, 256, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 1, 2, 384, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 2, 1, 128, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 2, 1, 256, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 2, 1, 384, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 2, 2, 128, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 2, 2, 256, 197, 131, 68,,,, +6, 6, 3, 3, 2, 2, 2, 2, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 1, 128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 1, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 1, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 2, 128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 2, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 2, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 3, 128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 3, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 1, 3, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 1, 128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 1, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 1, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 2, 128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 2, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 2, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 3, 128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 3, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 2, 3, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 1, 128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 1, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 1, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 2, 
128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 2, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 2, 384, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 3, 128, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 3, 256, 197, 131, 68,,,, +6, 6, 3, 3, 3, 3, 3, 3, 384, 197, 131, 68,,,, +6, 6, 3, 4, 1, 1, 1, 1, 128, 240, 173, 89,,,, +6, 6, 3, 4, 1, 1, 1, 1, 256, 240, 173, 89,,,, +6, 6, 3, 4, 1, 1, 1, 1, 384, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 1, 1, 128, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 1, 1, 256, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 1, 1, 384, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 1, 2, 128, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 1, 2, 256, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 1, 2, 384, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 2, 1, 128, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 2, 1, 256, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 2, 1, 384, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 2, 2, 128, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 2, 2, 256, 240, 173, 89,,,, +6, 6, 3, 4, 2, 2, 2, 2, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 1, 128, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 1, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 1, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 2, 128, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 2, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 2, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 3, 128, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 3, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 1, 3, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 1, 128, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 1, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 1, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 2, 128, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 2, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 2, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 3, 128, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 3, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 2, 3, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 3, 1, 128, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 3, 1, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 3, 1, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 3, 2, 128, 240, 173, 89,,,, +6, 
6, 3, 4, 3, 3, 3, 2, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 3, 2, 384, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 3, 3, 128, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 3, 3, 256, 240, 173, 89,,,, +6, 6, 3, 4, 3, 3, 3, 3, 384, 240, 173, 89,,,, +6, 6, 3, 5, 1, 1, 1, 1, 128, 240, 215, 110,,,, +6, 6, 3, 5, 1, 1, 1, 1, 256, 240, 215, 110,,,, +6, 6, 3, 5, 1, 1, 1, 1, 384, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 1, 1, 128, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 1, 1, 256, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 1, 1, 384, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 1, 2, 128, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 1, 2, 256, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 1, 2, 384, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 2, 1, 128, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 2, 1, 256, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 2, 1, 384, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 2, 2, 128, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 2, 2, 256, 240, 215, 110,,,, +6, 6, 3, 5, 2, 2, 2, 2, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 1, 128, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 1, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 1, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 2, 128, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 2, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 2, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 3, 128, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 3, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 1, 3, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 1, 128, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 1, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 1, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 2, 128, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 2, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 2, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 3, 128, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 3, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 2, 3, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 3, 1, 128, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 3, 1, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 3, 1, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 3, 2, 128, 240, 215, 
110,,,, +6, 6, 3, 5, 3, 3, 3, 2, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 3, 2, 384, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 3, 3, 128, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 3, 3, 256, 240, 215, 110,,,, +6, 6, 3, 5, 3, 3, 3, 3, 384, 240, 215, 110,,,, +6, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 131,,,, +6, 6, 3, 6, 1, 1, 1, 1, 256, 240, 240, 131,,,, +6, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 1, 1, 128, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 1, 1, 256, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 1, 1, 384, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 1, 2, 128, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 1, 2, 256, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 1, 2, 384, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 2, 1, 128, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 2, 1, 256, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 2, 1, 384, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 2, 2, 128, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 2, 2, 256, 240, 240, 131,,,, +6, 6, 3, 6, 2, 2, 2, 2, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 1, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 1, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 1, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 2, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 2, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 2, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 3, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 3, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 1, 3, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 1, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 1, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 1, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 2, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 2, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 2, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 3, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 3, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 2, 3, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 1, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 1, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 1, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 
2, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 2, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 2, 384, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 3, 128, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 3, 256, 240, 240, 131,,,, +6, 6, 3, 6, 3, 3, 3, 3, 384, 240, 240, 131,,,, +6, 6, 4, 1, 1, 1, 1, 1, 128, 69, 47, 26,,,, +6, 6, 4, 1, 1, 1, 1, 1, 256, 69, 47, 26,,,, +6, 6, 4, 1, 1, 1, 1, 1, 384, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 1, 1, 128, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 1, 1, 256, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 1, 1, 384, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 1, 2, 128, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 1, 2, 256, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 1, 2, 384, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 2, 1, 128, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 2, 1, 256, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 2, 1, 384, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 2, 2, 128, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 2, 2, 256, 69, 47, 26,,,, +6, 6, 4, 1, 2, 2, 2, 2, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 1, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 1, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 1, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 2, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 2, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 2, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 3, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 3, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 1, 3, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 1, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 1, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 1, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 2, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 2, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 2, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 3, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 3, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 2, 3, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 1, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 1, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 1, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 2, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 2, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 
2, 384, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 3, 128, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 3, 256, 69, 47, 26,,,, +6, 6, 4, 1, 3, 3, 3, 3, 384, 69, 47, 26,,,, +6, 6, 4, 2, 1, 1, 1, 1, 128, 133, 89, 47,,,, +6, 6, 4, 2, 1, 1, 1, 1, 256, 133, 89, 47,,,, +6, 6, 4, 2, 1, 1, 1, 1, 384, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 1, 1, 128, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 1, 1, 256, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 1, 1, 384, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 1, 2, 128, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 1, 2, 256, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 1, 2, 384, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 2, 1, 128, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 2, 1, 256, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 2, 1, 384, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 2, 2, 128, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 2, 2, 256, 133, 89, 47,,,, +6, 6, 4, 2, 2, 2, 2, 2, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 1, 128, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 1, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 1, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 2, 128, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 2, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 2, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 3, 128, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 3, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 1, 3, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 1, 128, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 1, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 1, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 2, 128, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 2, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 2, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 3, 128, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 3, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 2, 3, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 3, 1, 128, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 3, 1, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 3, 1, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 3, 2, 128, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 3, 2, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 3, 2, 384, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 3, 3, 128, 133, 89, 47,,,, 
+6, 6, 4, 2, 3, 3, 3, 3, 256, 133, 89, 47,,,, +6, 6, 4, 2, 3, 3, 3, 3, 384, 133, 89, 47,,,, +6, 6, 4, 3, 1, 1, 1, 1, 128, 197, 131, 68,,,, +6, 6, 4, 3, 1, 1, 1, 1, 256, 197, 131, 68,,,, +6, 6, 4, 3, 1, 1, 1, 1, 384, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 1, 1, 128, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 1, 1, 256, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 1, 1, 384, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 1, 2, 128, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 1, 2, 256, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 1, 2, 384, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 2, 1, 128, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 2, 1, 256, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 2, 1, 384, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 2, 2, 128, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 2, 2, 256, 197, 131, 68,,,, +6, 6, 4, 3, 2, 2, 2, 2, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 1, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 1, 256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 1, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 2, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 2, 256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 2, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 3, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 3, 256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 1, 3, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 1, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 1, 256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 1, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 2, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 2, 256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 2, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 3, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 3, 256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 2, 3, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 1, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 1, 256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 1, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 2, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 2, 256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 2, 384, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 3, 128, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 3, 
256, 197, 131, 68,,,, +6, 6, 4, 3, 3, 3, 3, 3, 384, 197, 131, 68,,,, +6, 6, 4, 4, 1, 1, 1, 1, 128, 240, 173, 89,,,, +6, 6, 4, 4, 1, 1, 1, 1, 256, 240, 173, 89,,,, +6, 6, 4, 4, 1, 1, 1, 1, 384, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 1, 1, 128, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 1, 1, 256, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 1, 1, 384, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 1, 2, 128, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 1, 2, 256, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 1, 2, 384, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 2, 1, 128, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 2, 1, 256, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 2, 1, 384, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 2, 2, 128, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 2, 2, 256, 240, 173, 89,,,, +6, 6, 4, 4, 2, 2, 2, 2, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 1, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 1, 256, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 1, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 2, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 2, 256, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 2, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 3, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 3, 256, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 1, 3, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 1, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 1, 256, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 1, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 2, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 2, 256, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 2, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 3, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 3, 256, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 2, 3, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 3, 1, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 3, 1, 256, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 3, 1, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 3, 2, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 3, 2, 256, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 3, 2, 384, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 3, 3, 128, 240, 173, 89,,,, +6, 6, 4, 4, 3, 3, 3, 3, 256, 240, 173, 89,,,, +6, 
6, 4, 4, 3, 3, 3, 3, 384, 240, 173, 89,,,, +6, 6, 4, 5, 1, 1, 1, 1, 128, 240, 215, 110,,,, +6, 6, 4, 5, 1, 1, 1, 1, 256, 240, 215, 110,,,, +6, 6, 4, 5, 1, 1, 1, 1, 384, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 1, 1, 128, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 1, 1, 256, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 1, 1, 384, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 1, 2, 128, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 1, 2, 256, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 1, 2, 384, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 2, 1, 128, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 2, 1, 256, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 2, 1, 384, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 2, 2, 128, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 2, 2, 256, 240, 215, 110,,,, +6, 6, 4, 5, 2, 2, 2, 2, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 1, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 1, 256, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 1, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 2, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 2, 256, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 2, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 3, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 3, 256, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 1, 3, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 1, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 1, 256, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 1, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 2, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 2, 256, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 2, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 3, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 3, 256, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 2, 3, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 1, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 1, 256, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 1, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 2, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 2, 256, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 2, 384, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 3, 128, 240, 215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 3, 256, 240, 
215, 110,,,, +6, 6, 4, 5, 3, 3, 3, 3, 384, 240, 215, 110,,,, +6, 6, 4, 6, 1, 1, 1, 1, 128, 240, 240, 131,,,, +6, 6, 4, 6, 1, 1, 1, 1, 256, 240, 240, 131,,,, +6, 6, 4, 6, 1, 1, 1, 1, 384, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 1, 1, 128, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 1, 1, 256, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 1, 1, 384, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 1, 2, 128, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 1, 2, 256, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 1, 2, 384, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 2, 1, 128, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 2, 1, 256, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 2, 1, 384, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 2, 2, 128, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 2, 2, 256, 240, 240, 131,,,, +6, 6, 4, 6, 2, 2, 2, 2, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 1, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 1, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 1, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 2, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 2, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 2, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 3, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 3, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 1, 3, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 1, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 1, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 1, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 2, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 2, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 2, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 3, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 3, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 2, 3, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 3, 1, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 3, 1, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 3, 1, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 3, 2, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 3, 2, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 3, 2, 384, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 3, 3, 128, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 
3, 3, 256, 240, 240, 131,,,, +6, 6, 4, 6, 3, 3, 3, 3, 384, 240, 240, 131,,,, +6, 6, 5, 1, 1, 1, 1, 1, 128, 69, 47, 26,,,, +6, 6, 5, 1, 1, 1, 1, 1, 256, 69, 47, 26,,,, +6, 6, 5, 1, 1, 1, 1, 1, 384, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 1, 1, 128, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 1, 1, 256, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 1, 1, 384, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 1, 2, 128, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 1, 2, 256, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 1, 2, 384, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 2, 1, 128, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 2, 1, 256, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 2, 1, 384, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 2, 2, 128, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 2, 2, 256, 69, 47, 26,,,, +6, 6, 5, 1, 2, 2, 2, 2, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 1, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 1, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 1, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 2, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 2, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 2, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 3, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 3, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 1, 3, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 1, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 1, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 1, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 2, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 2, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 2, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 3, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 3, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 2, 3, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 1, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 1, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 1, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 2, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 2, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 2, 384, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 3, 128, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 3, 256, 69, 47, 26,,,, +6, 6, 5, 1, 3, 3, 3, 3, 384, 69, 47, 26,,,, +6, 6, 5, 2, 1, 1, 1, 1, 128, 
133, 89, 47,,,, +6, 6, 5, 2, 1, 1, 1, 1, 256, 133, 89, 47,,,, +6, 6, 5, 2, 1, 1, 1, 1, 384, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 1, 1, 128, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 1, 1, 256, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 1, 1, 384, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 1, 2, 128, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 1, 2, 256, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 1, 2, 384, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 2, 1, 128, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 2, 1, 256, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 2, 1, 384, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 2, 2, 128, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 2, 2, 256, 133, 89, 47,,,, +6, 6, 5, 2, 2, 2, 2, 2, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 1, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 1, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 1, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 2, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 2, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 2, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 3, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 3, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 1, 3, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 1, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 1, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 1, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 2, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 2, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 2, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 3, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 3, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 2, 3, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 1, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 1, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 1, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 2, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 2, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 2, 384, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 3, 128, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 3, 256, 133, 89, 47,,,, +6, 6, 5, 2, 3, 3, 3, 3, 384, 133, 89, 47,,,, +6, 6, 5, 3, 1, 1, 1, 1, 128, 197, 131, 68,,,, +6, 6, 5, 3, 1, 1, 1, 1, 256, 197, 131, 68,,,, +6, 
6, 5, 3, 1, 1, 1, 1, 384, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 1, 1, 128, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 1, 1, 256, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 1, 1, 384, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 1, 2, 128, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 1, 2, 256, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 1, 2, 384, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 2, 1, 128, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 2, 1, 256, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 2, 1, 384, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 2, 2, 128, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 2, 2, 256, 197, 131, 68,,,, +6, 6, 5, 3, 2, 2, 2, 2, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 1, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 1, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 1, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 2, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 2, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 2, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 3, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 3, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 1, 3, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 1, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 1, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 1, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 2, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 2, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 2, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 3, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 3, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 2, 3, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 1, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 1, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 1, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 2, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 2, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 2, 384, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 3, 128, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 3, 256, 197, 131, 68,,,, +6, 6, 5, 3, 3, 3, 3, 3, 384, 197, 131, 68,,,, +6, 6, 5, 4, 1, 1, 1, 1, 128, 240, 173, 89,,,, +6, 6, 5, 4, 1, 1, 1, 1, 256, 240, 173, 89,,,, +6, 6, 5, 4, 1, 1, 1, 1, 384, 
240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 1, 1, 128, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 1, 1, 256, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 1, 1, 384, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 1, 2, 128, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 1, 2, 256, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 1, 2, 384, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 2, 1, 128, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 2, 1, 256, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 2, 1, 384, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 2, 2, 128, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 2, 2, 256, 240, 173, 89,,,, +6, 6, 5, 4, 2, 2, 2, 2, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 1, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 1, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 1, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 2, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 2, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 2, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 3, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 3, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 1, 3, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 1, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 1, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 1, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 2, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 2, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 2, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 3, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 3, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 2, 3, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 1, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 1, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 1, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 2, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 2, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 2, 384, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 3, 128, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 3, 256, 240, 173, 89,,,, +6, 6, 5, 4, 3, 3, 3, 3, 384, 240, 173, 89,,,, +6, 6, 5, 5, 1, 1, 1, 1, 128, 240, 215, 110,,,, +6, 6, 5, 5, 1, 1, 1, 1, 256, 240, 215, 110,,,, +6, 6, 5, 5, 1, 1, 1, 1, 384, 240, 215, 110,,,, +6, 
6, 5, 5, 2, 2, 1, 1, 128, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 1, 1, 256, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 1, 1, 384, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 1, 2, 128, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 1, 2, 256, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 1, 2, 384, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 2, 1, 128, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 2, 1, 256, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 2, 1, 384, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 2, 2, 128, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 2, 2, 256, 240, 215, 110,,,, +6, 6, 5, 5, 2, 2, 2, 2, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 1, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 1, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 1, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 2, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 2, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 2, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 3, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 3, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 1, 3, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 1, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 1, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 1, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 2, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 2, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 2, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 3, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 3, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 2, 3, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 1, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 1, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 1, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 2, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 2, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 2, 384, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 3, 128, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 3, 256, 240, 215, 110,,,, +6, 6, 5, 5, 3, 3, 3, 3, 384, 240, 215, 110,,,, +6, 6, 5, 6, 1, 1, 1, 1, 128, 240, 240, 131,,,, +6, 6, 5, 6, 1, 1, 1, 1, 256, 240, 240, 131,,,, +6, 6, 5, 6, 1, 1, 1, 1, 384, 240, 
240, 131,,,, +6, 6, 5, 6, 2, 2, 1, 1, 128, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 1, 1, 256, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 1, 1, 384, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 1, 2, 128, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 1, 2, 256, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 1, 2, 384, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 2, 1, 128, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 2, 1, 256, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 2, 1, 384, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 2, 2, 128, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 2, 2, 256, 240, 240, 131,,,, +6, 6, 5, 6, 2, 2, 2, 2, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 1, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 1, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 1, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 2, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 2, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 2, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 3, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 3, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 1, 3, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 1, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 1, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 1, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 2, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 2, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 2, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 3, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 3, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 2, 3, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 1, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 1, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 1, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 2, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 2, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 2, 384, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 3, 128, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 3, 256, 240, 240, 131,,,, +6, 6, 5, 6, 3, 3, 3, 3, 384, 240, 240, 131,,,, +6, 6, 6, 1, 1, 1, 1, 1, 128, 69, 47, 26,,,, +6, 6, 6, 1, 1, 1, 1, 1, 256, 69, 47, 26,,,, +6, 6, 6, 1, 1, 1, 1, 1, 
384, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 1, 1, 128, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 1, 1, 256, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 1, 1, 384, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 1, 2, 128, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 1, 2, 256, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 1, 2, 384, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 2, 1, 128, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 2, 1, 256, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 2, 1, 384, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 2, 2, 128, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 2, 2, 256, 69, 47, 26,,,, +6, 6, 6, 1, 2, 2, 2, 2, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 1, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 1, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 1, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 2, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 2, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 2, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 3, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 3, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 1, 3, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 1, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 1, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 1, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 2, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 2, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 2, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 3, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 3, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 2, 3, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 1, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 1, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 1, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 2, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 2, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 2, 384, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 3, 128, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 3, 256, 69, 47, 26,,,, +6, 6, 6, 1, 3, 3, 3, 3, 384, 69, 47, 26,,,, +6, 6, 6, 2, 1, 1, 1, 1, 128, 133, 89, 47,,,, +6, 6, 6, 2, 1, 1, 1, 1, 256, 133, 89, 47,,,, +6, 6, 6, 2, 1, 1, 1, 1, 384, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 1, 1, 128, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 1, 1, 256, 133, 89, 
47,,,, +6, 6, 6, 2, 2, 2, 1, 1, 384, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 1, 2, 128, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 1, 2, 256, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 1, 2, 384, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 2, 1, 128, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 2, 1, 256, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 2, 1, 384, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 2, 2, 128, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 2, 2, 256, 133, 89, 47,,,, +6, 6, 6, 2, 2, 2, 2, 2, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 1, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 1, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 1, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 2, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 2, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 2, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 3, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 3, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 1, 3, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 1, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 1, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 1, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 2, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 2, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 2, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 3, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 3, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 2, 3, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 1, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 1, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 1, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 2, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 2, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 2, 384, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 3, 128, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 3, 256, 133, 89, 47,,,, +6, 6, 6, 2, 3, 3, 3, 3, 384, 133, 89, 47,,,, +6, 6, 6, 3, 1, 1, 1, 1, 128, 197, 131, 68,,,, +6, 6, 6, 3, 1, 1, 1, 1, 256, 197, 131, 68,,,, +6, 6, 6, 3, 1, 1, 1, 1, 384, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 1, 1, 128, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 1, 1, 256, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 1, 1, 384, 197, 131, 68,,,, +6, 6, 
6, 3, 2, 2, 1, 2, 128, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 1, 2, 256, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 1, 2, 384, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 2, 1, 128, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 2, 1, 256, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 2, 1, 384, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 2, 2, 128, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 2, 2, 256, 197, 131, 68,,,, +6, 6, 6, 3, 2, 2, 2, 2, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 1, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 1, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 1, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 2, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 2, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 2, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 3, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 3, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 1, 3, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 1, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 1, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 1, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 2, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 2, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 2, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 3, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 3, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 2, 3, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 1, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 1, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 1, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 2, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 2, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 2, 384, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 3, 128, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 3, 256, 197, 131, 68,,,, +6, 6, 6, 3, 3, 3, 3, 3, 384, 197, 131, 68,,,, +6, 6, 6, 4, 1, 1, 1, 1, 128, 240, 173, 89,,,, +6, 6, 6, 4, 1, 1, 1, 1, 256, 240, 173, 89,,,, +6, 6, 6, 4, 1, 1, 1, 1, 384, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 1, 1, 128, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 1, 1, 256, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 1, 1, 384, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 1, 2, 128, 
240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 1, 2, 256, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 1, 2, 384, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 2, 1, 128, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 2, 1, 256, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 2, 1, 384, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 2, 2, 128, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 2, 2, 256, 240, 173, 89,,,, +6, 6, 6, 4, 2, 2, 2, 2, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 1, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 1, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 1, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 2, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 2, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 2, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 3, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 3, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 1, 3, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 1, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 1, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 1, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 2, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 2, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 2, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 3, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 3, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 2, 3, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 1, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 1, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 1, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 2, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 2, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 2, 384, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 3, 128, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 3, 256, 240, 173, 89,,,, +6, 6, 6, 4, 3, 3, 3, 3, 384, 240, 173, 89,,,, +6, 6, 6, 5, 1, 1, 1, 1, 128, 240, 215, 110,,,, +6, 6, 6, 5, 1, 1, 1, 1, 256, 240, 215, 110,,,, +6, 6, 6, 5, 1, 1, 1, 1, 384, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 1, 1, 128, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 1, 1, 256, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 1, 1, 384, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 1, 2, 128, 240, 215, 110,,,, 
+6, 6, 6, 5, 2, 2, 1, 2, 256, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 1, 2, 384, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 2, 1, 128, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 2, 1, 256, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 2, 1, 384, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 2, 2, 128, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 2, 2, 256, 240, 215, 110,,,, +6, 6, 6, 5, 2, 2, 2, 2, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 1, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 1, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 1, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 2, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 2, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 2, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 3, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 3, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 1, 3, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 1, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 1, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 1, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 2, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 2, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 2, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 3, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 3, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 2, 3, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 1, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 1, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 1, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 2, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 2, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 2, 384, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 3, 128, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 3, 256, 240, 215, 110,,,, +6, 6, 6, 5, 3, 3, 3, 3, 384, 240, 215, 110,,,, +6, 6, 6, 6, 1, 1, 1, 1, 128, 240, 240, 131,,,, +6, 6, 6, 6, 1, 1, 1, 1, 256, 240, 240, 131,,,, +6, 6, 6, 6, 1, 1, 1, 1, 384, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 1, 1, 128, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 1, 1, 256, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 1, 1, 384, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 1, 2, 128, 
240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 1, 2, 256, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 1, 2, 384, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 2, 1, 128, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 2, 1, 256, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 2, 1, 384, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 2, 2, 128, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 2, 2, 256, 240, 240, 131,,,, +6, 6, 6, 6, 2, 2, 2, 2, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 1, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 1, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 1, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 2, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 2, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 2, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 3, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 3, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 1, 3, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 1, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 1, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 1, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 2, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 2, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 2, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 3, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 3, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 2, 3, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 1, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 1, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 1, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 2, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 2, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 2, 384, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 3, 128, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 3, 256, 240, 240, 131,,,, +6, 6, 6, 6, 3, 3, 3, 3, 384, 240, 240, 131,,,, +6, 7, 1, 1, 1, 1, 1, 1, 128, 70, 48, 27,,,, +6, 7, 1, 1, 1, 1, 1, 1, 256, 70, 48, 27,,,, +6, 7, 1, 1, 1, 1, 1, 1, 384, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 1, 1, 128, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 1, 1, 256, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 1, 1, 384, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 1, 2, 128, 
70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 1, 2, 256, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 1, 2, 384, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 2, 1, 128, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 2, 1, 256, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 2, 1, 384, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 2, 2, 128, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 2, 2, 256, 70, 48, 27,,,, +6, 7, 1, 1, 2, 2, 2, 2, 384, 70, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 1, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 1, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 1, 384, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 2, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 2, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 2, 384, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 3, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 3, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 1, 3, 384, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 1, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 1, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 1, 384, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 2, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 2, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 2, 384, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 3, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 3, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 2, 3, 384, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 1, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 1, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 1, 384, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 2, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 2, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 2, 384, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 3, 128, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 3, 256, 69, 48, 27,,,, +6, 7, 1, 1, 3, 3, 3, 3, 384, 69, 48, 27,,,, +6, 7, 1, 2, 1, 1, 1, 1, 128, 134, 90, 48,,,, +6, 7, 1, 2, 1, 1, 1, 1, 256, 134, 90, 48,,,, +6, 7, 1, 2, 1, 1, 1, 1, 384, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 1, 1, 128, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 1, 1, 256, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 1, 1, 384, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 1, 2, 128, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 1, 2, 256, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 1, 2, 384, 134, 90, 
48,,,, +6, 7, 1, 2, 2, 2, 2, 1, 128, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 2, 1, 256, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 2, 1, 384, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 2, 2, 128, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 2, 2, 256, 134, 90, 48,,,, +6, 7, 1, 2, 2, 2, 2, 2, 384, 134, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 1, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 1, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 1, 384, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 2, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 2, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 2, 384, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 3, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 3, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 1, 3, 384, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 1, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 1, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 1, 384, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 2, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 2, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 2, 384, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 3, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 3, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 2, 3, 384, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 1, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 1, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 1, 384, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 2, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 2, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 2, 384, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 3, 128, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 3, 256, 132, 90, 48,,,, +6, 7, 1, 2, 3, 3, 3, 3, 384, 132, 90, 48,,,, +6, 7, 1, 3, 1, 1, 1, 1, 128, 198, 132, 69,,,, +6, 7, 1, 3, 1, 1, 1, 1, 256, 198, 132, 69,,,, +6, 7, 1, 3, 1, 1, 1, 1, 384, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 1, 1, 128, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 1, 1, 256, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 1, 1, 384, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 1, 2, 128, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 1, 2, 256, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 1, 2, 384, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 2, 1, 128, 198, 132, 69,,,, +6, 
7, 1, 3, 2, 2, 2, 1, 256, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 2, 1, 384, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 2, 2, 128, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 2, 2, 256, 198, 132, 69,,,, +6, 7, 1, 3, 2, 2, 2, 2, 384, 198, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 1, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 1, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 1, 384, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 2, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 2, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 2, 384, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 3, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 3, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 1, 3, 384, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 1, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 1, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 1, 384, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 2, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 2, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 2, 384, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 3, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 3, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 2, 3, 384, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 1, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 1, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 1, 384, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 2, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 2, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 2, 384, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 3, 128, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 3, 256, 195, 132, 69,,,, +6, 7, 1, 3, 3, 3, 3, 3, 384, 195, 132, 69,,,, +6, 7, 1, 4, 1, 1, 1, 1, 128, 240, 174, 90,,,, +6, 7, 1, 4, 1, 1, 1, 1, 256, 240, 174, 90,,,, +6, 7, 1, 4, 1, 1, 1, 1, 384, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 1, 1, 128, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 1, 1, 256, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 1, 1, 384, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 1, 2, 128, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 1, 2, 256, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 1, 2, 384, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 2, 1, 128, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 2, 1, 256, 
240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 2, 1, 384, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 2, 2, 128, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 2, 2, 256, 240, 174, 90,,,, +6, 7, 1, 4, 2, 2, 2, 2, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 1, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 1, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 1, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 2, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 2, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 2, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 3, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 3, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 1, 3, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 1, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 1, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 1, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 2, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 2, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 2, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 3, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 3, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 2, 3, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 1, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 1, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 1, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 2, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 2, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 2, 384, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 3, 128, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 3, 256, 240, 174, 90,,,, +6, 7, 1, 4, 3, 3, 3, 3, 384, 240, 174, 90,,,, +6, 7, 1, 5, 1, 1, 1, 1, 128, 240, 216, 111,,,, +6, 7, 1, 5, 1, 1, 1, 1, 256, 240, 216, 111,,,, +6, 7, 1, 5, 1, 1, 1, 1, 384, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 1, 1, 128, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 1, 1, 256, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 1, 1, 384, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 1, 2, 128, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 1, 2, 256, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 1, 2, 384, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 2, 1, 128, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 2, 1, 256, 240, 216, 
111,,,, +6, 7, 1, 5, 2, 2, 2, 1, 384, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 2, 2, 128, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 2, 2, 256, 240, 216, 111,,,, +6, 7, 1, 5, 2, 2, 2, 2, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 1, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 1, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 1, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 2, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 2, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 2, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 3, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 3, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 1, 3, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 1, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 1, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 1, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 2, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 2, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 2, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 3, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 3, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 2, 3, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 1, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 1, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 1, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 2, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 2, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 2, 384, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 3, 128, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 3, 256, 240, 216, 111,,,, +6, 7, 1, 5, 3, 3, 3, 3, 384, 240, 216, 111,,,, +6, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 132,,,, +6, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 132,,,, +6, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 1, 1, 128, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 1, 1, 256, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 1, 1, 384, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 1, 2, 128, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 1, 2, 256, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 1, 2, 384, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 2, 1, 128, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 2, 
1, 256, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 2, 1, 384, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 2, 2, 128, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 2, 2, 256, 240, 240, 132,,,, +6, 7, 1, 6, 2, 2, 2, 2, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 1, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 1, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 1, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 2, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 2, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 2, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 3, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 3, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 1, 3, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 1, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 1, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 1, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 2, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 2, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 2, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 3, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 3, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 2, 3, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 1, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 1, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 1, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 2, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 2, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 2, 384, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 3, 128, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 3, 256, 240, 240, 132,,,, +6, 7, 1, 6, 3, 3, 3, 3, 384, 240, 240, 132,,,, +6, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 153,,,, +6, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 153,,,, +6, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 1, 1, 128, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 1, 1, 256, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 1, 1, 384, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 1, 2, 128, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 1, 2, 256, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 1, 2, 384, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 2, 1, 128, 240, 240, 153,,,, +6, 
7, 1, 7, 2, 2, 2, 1, 256, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 2, 1, 384, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 2, 2, 128, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 2, 2, 256, 240, 240, 153,,,, +6, 7, 1, 7, 2, 2, 2, 2, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 1, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 1, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 1, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 2, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 2, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 2, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 3, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 3, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 1, 3, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 1, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 1, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 1, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 2, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 2, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 2, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 3, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 3, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 2, 3, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 1, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 1, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 1, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 2, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 2, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 2, 384, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 3, 128, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 3, 256, 240, 240, 153,,,, +6, 7, 1, 7, 3, 3, 3, 3, 384, 240, 240, 153,,,, +6, 7, 2, 1, 1, 1, 1, 1, 128, 70, 48, 27,,,, +6, 7, 2, 1, 1, 1, 1, 1, 256, 70, 48, 27,,,, +6, 7, 2, 1, 1, 1, 1, 1, 384, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 1, 1, 128, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 1, 1, 256, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 1, 1, 384, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 1, 2, 128, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 1, 2, 256, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 1, 2, 384, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 2, 1, 128, 70, 48, 27,,,, +6, 7, 2, 1, 2, 
2, 2, 1, 256, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 2, 1, 384, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 2, 2, 128, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 2, 2, 256, 70, 48, 27,,,, +6, 7, 2, 1, 2, 2, 2, 2, 384, 70, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 1, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 1, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 1, 384, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 2, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 2, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 2, 384, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 3, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 3, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 1, 3, 384, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 1, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 1, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 1, 384, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 2, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 2, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 2, 384, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 3, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 3, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 2, 3, 384, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 1, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 1, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 1, 384, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 2, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 2, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 2, 384, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 3, 128, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 3, 256, 69, 48, 27,,,, +6, 7, 2, 1, 3, 3, 3, 3, 384, 69, 48, 27,,,, +6, 7, 2, 2, 1, 1, 1, 1, 128, 134, 90, 48,,,, +6, 7, 2, 2, 1, 1, 1, 1, 256, 134, 90, 48,,,, +6, 7, 2, 2, 1, 1, 1, 1, 384, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 1, 1, 128, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 1, 1, 256, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 1, 1, 384, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 1, 2, 128, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 1, 2, 256, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 1, 2, 384, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 2, 1, 128, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 2, 1, 256, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 2, 1, 384, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 2, 
2, 128, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 2, 2, 256, 134, 90, 48,,,, +6, 7, 2, 2, 2, 2, 2, 2, 384, 134, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 1, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 1, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 1, 384, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 2, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 2, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 2, 384, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 3, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 3, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 1, 3, 384, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 1, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 1, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 1, 384, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 2, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 2, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 2, 384, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 3, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 3, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 2, 3, 384, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 1, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 1, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 1, 384, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 2, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 2, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 2, 384, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 3, 128, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 3, 256, 132, 90, 48,,,, +6, 7, 2, 2, 3, 3, 3, 3, 384, 132, 90, 48,,,, +6, 7, 2, 3, 1, 1, 1, 1, 128, 198, 132, 69,,,, +6, 7, 2, 3, 1, 1, 1, 1, 256, 198, 132, 69,,,, +6, 7, 2, 3, 1, 1, 1, 1, 384, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 1, 1, 128, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 1, 1, 256, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 1, 1, 384, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 1, 2, 128, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 1, 2, 256, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 1, 2, 384, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 2, 1, 128, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 2, 1, 256, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 2, 1, 384, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 2, 2, 128, 198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 2, 2, 256, 
198, 132, 69,,,, +6, 7, 2, 3, 2, 2, 2, 2, 384, 198, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 1, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 1, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 1, 384, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 2, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 2, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 2, 384, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 3, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 3, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 1, 3, 384, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 1, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 1, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 1, 384, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 2, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 2, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 2, 384, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 3, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 3, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 2, 3, 384, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 1, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 1, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 1, 384, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 2, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 2, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 2, 384, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 3, 128, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 3, 256, 195, 132, 69,,,, +6, 7, 2, 3, 3, 3, 3, 3, 384, 195, 132, 69,,,, +6, 7, 2, 4, 1, 1, 1, 1, 128, 240, 174, 90,,,, +6, 7, 2, 4, 1, 1, 1, 1, 256, 240, 174, 90,,,, +6, 7, 2, 4, 1, 1, 1, 1, 384, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 1, 1, 128, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 1, 1, 256, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 1, 1, 384, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 1, 2, 128, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 1, 2, 256, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 1, 2, 384, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 2, 1, 128, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 2, 1, 256, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 2, 1, 384, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 2, 2, 128, 240, 174, 90,,,, +6, 7, 2, 4, 2, 2, 2, 2, 256, 240, 174, 90,,,, +6, 7, 
2, 4, 2, 2, 2, 2, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 1, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 1, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 1, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 2, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 2, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 2, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 3, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 3, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 1, 3, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 1, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 1, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 1, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 2, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 2, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 2, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 3, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 3, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 2, 3, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 1, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 1, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 1, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 2, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 2, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 2, 384, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 3, 128, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 3, 256, 240, 174, 90,,,, +6, 7, 2, 4, 3, 3, 3, 3, 384, 240, 174, 90,,,, +6, 7, 2, 5, 1, 1, 1, 1, 128, 240, 216, 111,,,, +6, 7, 2, 5, 1, 1, 1, 1, 256, 240, 216, 111,,,, +6, 7, 2, 5, 1, 1, 1, 1, 384, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 1, 1, 128, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 1, 1, 256, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 1, 1, 384, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 1, 2, 128, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 1, 2, 256, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 1, 2, 384, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 2, 1, 128, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 2, 1, 256, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 2, 1, 384, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 2, 2, 128, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 2, 2, 256, 240, 216, 111,,,, +6, 7, 2, 5, 2, 2, 
2, 2, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 1, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 1, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 1, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 2, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 2, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 2, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 3, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 3, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 1, 3, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 1, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 1, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 1, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 2, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 2, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 2, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 3, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 3, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 2, 3, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 1, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 1, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 1, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 2, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 2, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 2, 384, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 3, 128, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 3, 256, 240, 216, 111,,,, +6, 7, 2, 5, 3, 3, 3, 3, 384, 240, 216, 111,,,, +6, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 132,,,, +6, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 132,,,, +6, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 1, 1, 128, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 1, 1, 256, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 1, 1, 384, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 1, 2, 128, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 1, 2, 256, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 1, 2, 384, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 2, 1, 128, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 2, 1, 256, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 2, 1, 384, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 2, 2, 128, 240, 240, 132,,,, +6, 7, 2, 6, 2, 2, 2, 2, 256, 240, 240, 132,,,, 
+6, 7, 2, 6, 2, 2, 2, 2, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 1, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 1, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 1, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 2, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 2, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 2, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 3, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 3, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 1, 3, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 1, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 1, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 1, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 2, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 2, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 2, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 3, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 3, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 2, 3, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 1, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 1, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 1, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 2, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 2, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 2, 384, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 3, 128, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 3, 256, 240, 240, 132,,,, +6, 7, 2, 6, 3, 3, 3, 3, 384, 240, 240, 132,,,, +6, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 153,,,, +6, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 153,,,, +6, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 1, 1, 128, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 1, 1, 256, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 1, 1, 384, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 1, 2, 128, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 1, 2, 256, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 1, 2, 384, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 2, 1, 128, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 2, 1, 256, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 2, 1, 384, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 2, 2, 128, 240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 2, 2, 256, 
240, 240, 153,,,, +6, 7, 2, 7, 2, 2, 2, 2, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 1, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 1, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 1, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 2, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 2, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 2, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 3, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 3, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 1, 3, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 1, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 1, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 1, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 2, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 2, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 2, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 3, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 3, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 2, 3, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 1, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 1, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 1, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 2, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 2, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 2, 384, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 3, 128, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 3, 256, 240, 240, 153,,,, +6, 7, 2, 7, 3, 3, 3, 3, 384, 240, 240, 153,,,, +6, 7, 3, 1, 1, 1, 1, 1, 128, 70, 48, 27,,,, +6, 7, 3, 1, 1, 1, 1, 1, 256, 70, 48, 27,,,, +6, 7, 3, 1, 1, 1, 1, 1, 384, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 1, 1, 128, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 1, 1, 256, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 1, 1, 384, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 1, 2, 128, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 1, 2, 256, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 1, 2, 384, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 2, 1, 128, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 2, 1, 256, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 2, 1, 384, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 2, 2, 128, 70, 48, 27,,,, +6, 7, 3, 1, 2, 2, 2, 2, 256, 70, 48, 27,,,, +6, 7, 
3, 1, 2, 2, 2, 2, 384, 70, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 1, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 1, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 1, 384, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 2, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 2, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 2, 384, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 3, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 3, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 1, 3, 384, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 1, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 1, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 1, 384, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 2, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 2, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 2, 384, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 3, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 3, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 2, 3, 384, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 1, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 1, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 1, 384, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 2, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 2, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 2, 384, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 3, 128, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 3, 256, 69, 48, 27,,,, +6, 7, 3, 1, 3, 3, 3, 3, 384, 69, 48, 27,,,, +6, 7, 3, 2, 1, 1, 1, 1, 128, 134, 90, 48,,,, +6, 7, 3, 2, 1, 1, 1, 1, 256, 134, 90, 48,,,, +6, 7, 3, 2, 1, 1, 1, 1, 384, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 1, 1, 128, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 1, 1, 256, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 1, 1, 384, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 1, 2, 128, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 1, 2, 256, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 1, 2, 384, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 2, 1, 128, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 2, 1, 256, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 2, 1, 384, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 2, 2, 128, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 2, 2, 256, 134, 90, 48,,,, +6, 7, 3, 2, 2, 2, 2, 2, 384, 134, 90, 48,,,, +6, 7, 3, 2, 3, 3, 1, 1, 128, 132, 90, 48,,,, +6, 7, 3, 
2, 3, 3, 1, 1, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 1, 1, 384, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 1, 2, 128, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 1, 2, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 1, 2, 384, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 1, 3, 128, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 1, 3, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 1, 3, 384, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 1, 128, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 1, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 1, 384, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 2, 128, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 2, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 2, 384, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 3, 128, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 3, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 2, 3, 384, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 1, 128, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 1, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 1, 384, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 2, 128, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 2, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 2, 384, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 3, 128, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 3, 256, 132, 90, 48,,,, +6, 7, 3, 2, 3, 3, 3, 3, 384, 132, 90, 48,,,, +6, 7, 3, 3, 1, 1, 1, 1, 128, 198, 132, 69,,,, +6, 7, 3, 3, 1, 1, 1, 1, 256, 198, 132, 69,,,, +6, 7, 3, 3, 1, 1, 1, 1, 384, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 1, 1, 128, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 1, 1, 256, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 1, 1, 384, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 1, 2, 128, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 1, 2, 256, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 1, 2, 384, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 2, 1, 128, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 2, 1, 256, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 2, 1, 384, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 2, 2, 128, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 2, 2, 256, 198, 132, 69,,,, +6, 7, 3, 3, 2, 2, 2, 2, 384, 198, 132, 69,,,, +6, 7, 3, 3, 3, 3, 1, 1, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 1, 1, 256, 195, 132, 69,,,, +6, 7, 3, 3, 
3, 3, 1, 1, 384, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 1, 2, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 1, 2, 256, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 1, 2, 384, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 1, 3, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 1, 3, 256, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 1, 3, 384, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 1, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 1, 256, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 1, 384, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 2, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 2, 256, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 2, 384, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 3, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 3, 256, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 2, 3, 384, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 1, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 1, 256, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 1, 384, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 2, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 2, 256, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 2, 384, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 3, 128, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 3, 256, 195, 132, 69,,,, +6, 7, 3, 3, 3, 3, 3, 3, 384, 195, 132, 69,,,, +6, 7, 3, 4, 1, 1, 1, 1, 128, 240, 174, 90,,,, +6, 7, 3, 4, 1, 1, 1, 1, 256, 240, 174, 90,,,, +6, 7, 3, 4, 1, 1, 1, 1, 384, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 1, 1, 128, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 1, 1, 256, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 1, 1, 384, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 1, 2, 128, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 1, 2, 256, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 1, 2, 384, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 2, 1, 128, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 2, 1, 256, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 2, 1, 384, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 2, 2, 128, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 2, 2, 256, 240, 174, 90,,,, +6, 7, 3, 4, 2, 2, 2, 2, 384, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 1, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 1, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 1, 384, 240, 
174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 2, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 2, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 2, 384, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 3, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 3, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 1, 3, 384, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 1, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 1, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 1, 384, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 2, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 2, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 2, 384, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 3, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 3, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 2, 3, 384, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 1, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 1, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 1, 384, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 2, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 2, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 2, 384, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 3, 128, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 3, 256, 240, 174, 90,,,, +6, 7, 3, 4, 3, 3, 3, 3, 384, 240, 174, 90,,,, +6, 7, 3, 5, 1, 1, 1, 1, 128, 240, 216, 111,,,, +6, 7, 3, 5, 1, 1, 1, 1, 256, 240, 216, 111,,,, +6, 7, 3, 5, 1, 1, 1, 1, 384, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 1, 1, 128, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 1, 1, 256, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 1, 1, 384, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 1, 2, 128, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 1, 2, 256, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 1, 2, 384, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 2, 1, 128, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 2, 1, 256, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 2, 1, 384, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 2, 2, 128, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 2, 2, 256, 240, 216, 111,,,, +6, 7, 3, 5, 2, 2, 2, 2, 384, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 1, 1, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 1, 1, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 1, 1, 384, 240, 216, 
111,,,, +6, 7, 3, 5, 3, 3, 1, 2, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 1, 2, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 1, 2, 384, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 1, 3, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 1, 3, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 1, 3, 384, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 1, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 1, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 1, 384, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 2, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 2, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 2, 384, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 3, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 3, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 2, 3, 384, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 1, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 1, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 1, 384, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 2, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 2, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 2, 384, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 3, 128, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 3, 256, 240, 216, 111,,,, +6, 7, 3, 5, 3, 3, 3, 3, 384, 240, 216, 111,,,, +6, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 132,,,, +6, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 132,,,, +6, 7, 3, 6, 1, 1, 1, 1, 384, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 1, 1, 128, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 1, 1, 256, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 1, 1, 384, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 1, 2, 128, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 1, 2, 256, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 1, 2, 384, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 2, 1, 128, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 2, 1, 256, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 2, 1, 384, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 2, 2, 128, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 2, 2, 256, 240, 240, 132,,,, +6, 7, 3, 6, 2, 2, 2, 2, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 1, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 1, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 
1, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 2, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 2, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 2, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 3, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 3, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 1, 3, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 1, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 1, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 1, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 2, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 2, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 2, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 3, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 3, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 2, 3, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 1, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 1, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 1, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 2, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 2, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 2, 384, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 3, 128, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 3, 256, 240, 240, 132,,,, +6, 7, 3, 6, 3, 3, 3, 3, 384, 240, 240, 132,,,, +6, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 153,,,, +6, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 153,,,, +6, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 1, 1, 128, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 1, 1, 256, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 1, 1, 384, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 1, 2, 128, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 1, 2, 256, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 1, 2, 384, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 2, 1, 128, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 2, 1, 256, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 2, 1, 384, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 2, 2, 128, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 2, 2, 256, 240, 240, 153,,,, +6, 7, 3, 7, 2, 2, 2, 2, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 1, 1, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 1, 1, 256, 240, 240, 153,,,, +6, 
7, 3, 7, 3, 3, 1, 1, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 1, 2, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 1, 2, 256, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 1, 2, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 1, 3, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 1, 3, 256, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 1, 3, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 1, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 1, 256, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 1, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 2, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 2, 256, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 2, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 3, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 3, 256, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 2, 3, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 1, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 1, 256, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 1, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 2, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 2, 256, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 2, 384, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 3, 128, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 3, 256, 240, 240, 153,,,, +6, 7, 3, 7, 3, 3, 3, 3, 384, 240, 240, 153,,,, +6, 7, 4, 1, 1, 1, 1, 1, 128, 70, 48, 27,,,, +6, 7, 4, 1, 1, 1, 1, 1, 256, 70, 48, 27,,,, +6, 7, 4, 1, 1, 1, 1, 1, 384, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 1, 1, 128, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 1, 1, 256, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 1, 1, 384, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 1, 2, 128, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 1, 2, 256, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 1, 2, 384, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 2, 1, 128, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 2, 1, 256, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 2, 1, 384, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 2, 2, 128, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 2, 2, 256, 70, 48, 27,,,, +6, 7, 4, 1, 2, 2, 2, 2, 384, 70, 48, 27,,,, +6, 7, 4, 1, 3, 3, 1, 1, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 1, 1, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 1, 1, 384, 69, 48, 
27,,,, +6, 7, 4, 1, 3, 3, 1, 2, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 1, 2, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 1, 2, 384, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 1, 3, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 1, 3, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 1, 3, 384, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 1, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 1, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 1, 384, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 2, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 2, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 2, 384, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 3, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 3, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 2, 3, 384, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 1, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 1, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 1, 384, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 2, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 2, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 2, 384, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 3, 128, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 3, 256, 69, 48, 27,,,, +6, 7, 4, 1, 3, 3, 3, 3, 384, 69, 48, 27,,,, +6, 7, 4, 2, 1, 1, 1, 1, 128, 134, 90, 48,,,, +6, 7, 4, 2, 1, 1, 1, 1, 256, 134, 90, 48,,,, +6, 7, 4, 2, 1, 1, 1, 1, 384, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 1, 1, 128, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 1, 1, 256, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 1, 1, 384, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 1, 2, 128, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 1, 2, 256, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 1, 2, 384, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 2, 1, 128, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 2, 1, 256, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 2, 1, 384, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 2, 2, 128, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 2, 2, 256, 134, 90, 48,,,, +6, 7, 4, 2, 2, 2, 2, 2, 384, 134, 90, 48,,,, +6, 7, 4, 2, 3, 3, 1, 1, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 1, 1, 256, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 1, 1, 384, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 1, 2, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 1, 2, 256, 132, 90, 
48,,,, +6, 7, 4, 2, 3, 3, 1, 2, 384, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 1, 3, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 1, 3, 256, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 1, 3, 384, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 1, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 1, 256, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 1, 384, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 2, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 2, 256, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 2, 384, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 3, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 3, 256, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 2, 3, 384, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 1, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 1, 256, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 1, 384, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 2, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 2, 256, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 2, 384, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 3, 128, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 3, 256, 132, 90, 48,,,, +6, 7, 4, 2, 3, 3, 3, 3, 384, 132, 90, 48,,,, +6, 7, 4, 3, 1, 1, 1, 1, 128, 198, 132, 69,,,, +6, 7, 4, 3, 1, 1, 1, 1, 256, 198, 132, 69,,,, +6, 7, 4, 3, 1, 1, 1, 1, 384, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 1, 1, 128, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 1, 1, 256, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 1, 1, 384, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 1, 2, 128, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 1, 2, 256, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 1, 2, 384, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 2, 1, 128, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 2, 1, 256, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 2, 1, 384, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 2, 2, 128, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 2, 2, 256, 198, 132, 69,,,, +6, 7, 4, 3, 2, 2, 2, 2, 384, 198, 132, 69,,,, +6, 7, 4, 3, 3, 3, 1, 1, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 1, 1, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 1, 1, 384, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 1, 2, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 1, 2, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 1, 2, 384, 195, 132, 
69,,,, +6, 7, 4, 3, 3, 3, 1, 3, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 1, 3, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 1, 3, 384, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 1, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 1, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 1, 384, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 2, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 2, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 2, 384, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 3, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 3, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 2, 3, 384, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 1, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 1, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 1, 384, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 2, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 2, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 2, 384, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 3, 128, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 3, 256, 195, 132, 69,,,, +6, 7, 4, 3, 3, 3, 3, 3, 384, 195, 132, 69,,,, +6, 7, 4, 4, 1, 1, 1, 1, 128, 240, 174, 90,,,, +6, 7, 4, 4, 1, 1, 1, 1, 256, 240, 174, 90,,,, +6, 7, 4, 4, 1, 1, 1, 1, 384, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 1, 1, 128, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 1, 1, 256, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 1, 1, 384, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 1, 2, 128, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 1, 2, 256, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 1, 2, 384, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 2, 1, 128, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 2, 1, 256, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 2, 1, 384, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 2, 2, 128, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 2, 2, 256, 240, 174, 90,,,, +6, 7, 4, 4, 2, 2, 2, 2, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 1, 1, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 1, 1, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 1, 1, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 1, 2, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 1, 2, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 1, 2, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 
1, 3, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 1, 3, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 1, 3, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 1, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 1, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 1, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 2, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 2, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 2, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 3, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 3, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 2, 3, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 1, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 1, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 1, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 2, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 2, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 2, 384, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 3, 128, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 3, 256, 240, 174, 90,,,, +6, 7, 4, 4, 3, 3, 3, 3, 384, 240, 174, 90,,,, +6, 7, 4, 5, 1, 1, 1, 1, 128, 240, 216, 111,,,, +6, 7, 4, 5, 1, 1, 1, 1, 256, 240, 216, 111,,,, +6, 7, 4, 5, 1, 1, 1, 1, 384, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 1, 1, 128, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 1, 1, 256, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 1, 1, 384, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 1, 2, 128, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 1, 2, 256, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 1, 2, 384, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 2, 1, 128, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 2, 1, 256, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 2, 1, 384, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 2, 2, 128, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 2, 2, 256, 240, 216, 111,,,, +6, 7, 4, 5, 2, 2, 2, 2, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 1, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 1, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 1, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 2, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 2, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 2, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 
3, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 3, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 1, 3, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 1, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 1, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 1, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 2, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 2, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 2, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 3, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 3, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 2, 3, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 1, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 1, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 1, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 2, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 2, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 2, 384, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 3, 128, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 3, 256, 240, 216, 111,,,, +6, 7, 4, 5, 3, 3, 3, 3, 384, 240, 216, 111,,,, +6, 7, 4, 6, 1, 1, 1, 1, 128, 240, 240, 132,,,, +6, 7, 4, 6, 1, 1, 1, 1, 256, 240, 240, 132,,,, +6, 7, 4, 6, 1, 1, 1, 1, 384, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 1, 1, 128, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 1, 1, 256, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 1, 1, 384, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 1, 2, 128, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 1, 2, 256, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 1, 2, 384, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 2, 1, 128, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 2, 1, 256, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 2, 1, 384, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 2, 2, 128, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 2, 2, 256, 240, 240, 132,,,, +6, 7, 4, 6, 2, 2, 2, 2, 384, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 1, 1, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 1, 1, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 1, 1, 384, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 1, 2, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 1, 2, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 1, 2, 384, 240, 240, 132,,,, +6, 
7, 4, 6, 3, 3, 1, 3, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 1, 3, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 1, 3, 384, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 1, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 1, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 1, 384, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 2, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 2, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 2, 384, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 3, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 3, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 2, 3, 384, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 1, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 1, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 1, 384, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 2, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 2, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 2, 384, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 3, 128, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 3, 256, 240, 240, 132,,,, +6, 7, 4, 6, 3, 3, 3, 3, 384, 240, 240, 132,,,, +6, 7, 4, 7, 1, 1, 1, 1, 128, 240, 240, 153,,,, +6, 7, 4, 7, 1, 1, 1, 1, 256, 240, 240, 153,,,, +6, 7, 4, 7, 1, 1, 1, 1, 384, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 1, 1, 128, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 1, 1, 256, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 1, 1, 384, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 1, 2, 128, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 1, 2, 256, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 1, 2, 384, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 2, 1, 128, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 2, 1, 256, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 2, 1, 384, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 2, 2, 128, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 2, 2, 256, 240, 240, 153,,,, +6, 7, 4, 7, 2, 2, 2, 2, 384, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 1, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 1, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 1, 384, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 2, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 2, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 2, 384, 240, 
240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 3, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 3, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 1, 3, 384, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 1, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 1, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 1, 384, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 2, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 2, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 2, 384, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 3, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 3, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 2, 3, 384, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 1, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 1, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 1, 384, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 2, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 2, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 2, 384, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 3, 128, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 3, 256, 240, 240, 153,,,, +6, 7, 4, 7, 3, 3, 3, 3, 384, 240, 240, 153,,,, +6, 7, 5, 1, 1, 1, 1, 1, 128, 70, 48, 27,,,, +6, 7, 5, 1, 1, 1, 1, 1, 256, 70, 48, 27,,,, +6, 7, 5, 1, 1, 1, 1, 1, 384, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 1, 1, 128, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 1, 1, 256, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 1, 1, 384, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 1, 2, 128, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 1, 2, 256, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 1, 2, 384, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 2, 1, 128, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 2, 1, 256, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 2, 1, 384, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 2, 2, 128, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 2, 2, 256, 70, 48, 27,,,, +6, 7, 5, 1, 2, 2, 2, 2, 384, 70, 48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 1, 128, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 1, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 1, 384, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 2, 128, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 2, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 2, 384, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 3, 128, 69, 
48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 3, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 1, 3, 384, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 1, 128, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 1, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 1, 384, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 2, 128, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 2, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 2, 384, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 3, 128, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 3, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 2, 3, 384, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 1, 128, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 1, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 1, 384, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 2, 128, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 2, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 2, 384, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 3, 128, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 3, 256, 69, 48, 27,,,, +6, 7, 5, 1, 3, 3, 3, 3, 384, 69, 48, 27,,,, +6, 7, 5, 2, 1, 1, 1, 1, 128, 134, 90, 48,,,, +6, 7, 5, 2, 1, 1, 1, 1, 256, 134, 90, 48,,,, +6, 7, 5, 2, 1, 1, 1, 1, 384, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 1, 1, 128, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 1, 1, 256, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 1, 1, 384, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 1, 2, 128, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 1, 2, 256, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 1, 2, 384, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 2, 1, 128, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 2, 1, 256, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 2, 1, 384, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 2, 2, 128, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 2, 2, 256, 134, 90, 48,,,, +6, 7, 5, 2, 2, 2, 2, 2, 384, 134, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 1, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 1, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 1, 384, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 2, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 2, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 2, 384, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 3, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 3, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 1, 3, 384, 
132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 1, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 1, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 1, 384, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 2, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 2, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 2, 384, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 3, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 3, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 2, 3, 384, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 1, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 1, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 1, 384, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 2, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 2, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 2, 384, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 3, 128, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 3, 256, 132, 90, 48,,,, +6, 7, 5, 2, 3, 3, 3, 3, 384, 132, 90, 48,,,, +6, 7, 5, 3, 1, 1, 1, 1, 128, 198, 132, 69,,,, +6, 7, 5, 3, 1, 1, 1, 1, 256, 198, 132, 69,,,, +6, 7, 5, 3, 1, 1, 1, 1, 384, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 1, 1, 128, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 1, 1, 256, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 1, 1, 384, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 1, 2, 128, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 1, 2, 256, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 1, 2, 384, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 2, 1, 128, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 2, 1, 256, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 2, 1, 384, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 2, 2, 128, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 2, 2, 256, 198, 132, 69,,,, +6, 7, 5, 3, 2, 2, 2, 2, 384, 198, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 1, 128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 1, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 1, 384, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 2, 128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 2, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 2, 384, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 3, 128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 3, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 1, 3, 384, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 1, 
128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 1, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 1, 384, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 2, 128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 2, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 2, 384, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 3, 128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 3, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 2, 3, 384, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 1, 128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 1, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 1, 384, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 2, 128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 2, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 2, 384, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 3, 128, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 3, 256, 195, 132, 69,,,, +6, 7, 5, 3, 3, 3, 3, 3, 384, 195, 132, 69,,,, +6, 7, 5, 4, 1, 1, 1, 1, 128, 240, 174, 90,,,, +6, 7, 5, 4, 1, 1, 1, 1, 256, 240, 174, 90,,,, +6, 7, 5, 4, 1, 1, 1, 1, 384, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 1, 1, 128, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 1, 1, 256, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 1, 1, 384, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 1, 2, 128, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 1, 2, 256, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 1, 2, 384, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 2, 1, 128, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 2, 1, 256, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 2, 1, 384, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 2, 2, 128, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 2, 2, 256, 240, 174, 90,,,, +6, 7, 5, 4, 2, 2, 2, 2, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 1, 128, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 1, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 1, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 2, 128, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 2, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 2, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 3, 128, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 3, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 1, 3, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 2, 1, 128, 240, 174, 90,,,, +6, 
7, 5, 4, 3, 3, 2, 1, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 2, 1, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 2, 2, 128, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 2, 2, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 2, 2, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 2, 3, 128, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 2, 3, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 2, 3, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 1, 128, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 1, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 1, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 2, 128, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 2, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 2, 384, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 3, 128, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 3, 256, 240, 174, 90,,,, +6, 7, 5, 4, 3, 3, 3, 3, 384, 240, 174, 90,,,, +6, 7, 5, 5, 1, 1, 1, 1, 128, 240, 216, 111,,,, +6, 7, 5, 5, 1, 1, 1, 1, 256, 240, 216, 111,,,, +6, 7, 5, 5, 1, 1, 1, 1, 384, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 1, 1, 128, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 1, 1, 256, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 1, 1, 384, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 1, 2, 128, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 1, 2, 256, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 1, 2, 384, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 2, 1, 128, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 2, 1, 256, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 2, 1, 384, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 2, 2, 128, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 2, 2, 256, 240, 216, 111,,,, +6, 7, 5, 5, 2, 2, 2, 2, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 1, 128, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 1, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 1, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 2, 128, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 2, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 2, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 3, 128, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 3, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 1, 3, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 2, 1, 128, 240, 216, 111,,,, +6, 
7, 5, 5, 3, 3, 2, 1, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 2, 1, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 2, 2, 128, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 2, 2, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 2, 2, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 2, 3, 128, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 2, 3, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 2, 3, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 1, 128, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 1, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 1, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 2, 128, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 2, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 2, 384, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 3, 128, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 3, 256, 240, 216, 111,,,, +6, 7, 5, 5, 3, 3, 3, 3, 384, 240, 216, 111,,,, +6, 7, 5, 6, 1, 1, 1, 1, 128, 240, 240, 132,,,, +6, 7, 5, 6, 1, 1, 1, 1, 256, 240, 240, 132,,,, +6, 7, 5, 6, 1, 1, 1, 1, 384, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 1, 1, 128, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 1, 1, 256, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 1, 1, 384, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 1, 2, 128, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 1, 2, 256, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 1, 2, 384, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 2, 1, 128, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 2, 1, 256, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 2, 1, 384, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 2, 2, 128, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 2, 2, 256, 240, 240, 132,,,, +6, 7, 5, 6, 2, 2, 2, 2, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 1, 128, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 1, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 1, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 2, 128, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 2, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 2, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 3, 128, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 3, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 1, 3, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 1, 128, 240, 
240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 1, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 1, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 2, 128, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 2, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 2, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 3, 128, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 3, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 2, 3, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 1, 128, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 1, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 1, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 2, 128, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 2, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 2, 384, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 3, 128, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 3, 256, 240, 240, 132,,,, +6, 7, 5, 6, 3, 3, 3, 3, 384, 240, 240, 132,,,, +6, 7, 5, 7, 1, 1, 1, 1, 128, 240, 240, 153,,,, +6, 7, 5, 7, 1, 1, 1, 1, 256, 240, 240, 153,,,, +6, 7, 5, 7, 1, 1, 1, 1, 384, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 1, 1, 128, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 1, 1, 256, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 1, 1, 384, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 1, 2, 128, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 1, 2, 256, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 1, 2, 384, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 2, 1, 128, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 2, 1, 256, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 2, 1, 384, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 2, 2, 128, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 2, 2, 256, 240, 240, 153,,,, +6, 7, 5, 7, 2, 2, 2, 2, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 1, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 1, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 1, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 2, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 2, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 2, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 3, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 3, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 1, 3, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 
2, 1, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 2, 1, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 2, 1, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 2, 2, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 2, 2, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 2, 2, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 2, 3, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 2, 3, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 2, 3, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 1, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 1, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 1, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 2, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 2, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 2, 384, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 3, 128, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 3, 256, 240, 240, 153,,,, +6, 7, 5, 7, 3, 3, 3, 3, 384, 240, 240, 153,,,, +6, 7, 6, 1, 1, 1, 1, 1, 128, 70, 48, 27,,,, +6, 7, 6, 1, 1, 1, 1, 1, 256, 70, 48, 27,,,, +6, 7, 6, 1, 1, 1, 1, 1, 384, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 1, 1, 128, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 1, 1, 256, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 1, 1, 384, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 1, 2, 128, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 1, 2, 256, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 1, 2, 384, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 2, 1, 128, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 2, 1, 256, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 2, 1, 384, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 2, 2, 128, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 2, 2, 256, 70, 48, 27,,,, +6, 7, 6, 1, 2, 2, 2, 2, 384, 70, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 1, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 1, 256, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 1, 384, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 2, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 2, 256, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 2, 384, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 3, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 3, 256, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 1, 3, 384, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 1, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 1, 256, 
69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 1, 384, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 2, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 2, 256, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 2, 384, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 3, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 3, 256, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 2, 3, 384, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 1, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 1, 256, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 1, 384, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 2, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 2, 256, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 2, 384, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 3, 128, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 3, 256, 69, 48, 27,,,, +6, 7, 6, 1, 3, 3, 3, 3, 384, 69, 48, 27,,,, +6, 7, 6, 2, 1, 1, 1, 1, 128, 134, 90, 48,,,, +6, 7, 6, 2, 1, 1, 1, 1, 256, 134, 90, 48,,,, +6, 7, 6, 2, 1, 1, 1, 1, 384, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 1, 1, 128, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 1, 1, 256, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 1, 1, 384, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 1, 2, 128, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 1, 2, 256, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 1, 2, 384, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 2, 1, 128, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 2, 1, 256, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 2, 1, 384, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 2, 2, 128, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 2, 2, 256, 134, 90, 48,,,, +6, 7, 6, 2, 2, 2, 2, 2, 384, 134, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 1, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 1, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 1, 384, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 2, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 2, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 2, 384, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 3, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 3, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 1, 3, 384, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 1, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 1, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 1, 384, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 
2, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 2, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 2, 384, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 3, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 3, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 2, 3, 384, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 1, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 1, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 1, 384, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 2, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 2, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 2, 384, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 3, 128, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 3, 256, 132, 90, 48,,,, +6, 7, 6, 2, 3, 3, 3, 3, 384, 132, 90, 48,,,, +6, 7, 6, 3, 1, 1, 1, 1, 128, 198, 132, 69,,,, +6, 7, 6, 3, 1, 1, 1, 1, 256, 198, 132, 69,,,, +6, 7, 6, 3, 1, 1, 1, 1, 384, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 1, 1, 128, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 1, 1, 256, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 1, 1, 384, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 1, 2, 128, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 1, 2, 256, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 1, 2, 384, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 2, 1, 128, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 2, 1, 256, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 2, 1, 384, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 2, 2, 128, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 2, 2, 256, 198, 132, 69,,,, +6, 7, 6, 3, 2, 2, 2, 2, 384, 198, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 1, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 1, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 1, 384, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 2, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 2, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 2, 384, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 3, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 3, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 1, 3, 384, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 2, 1, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 2, 1, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 2, 1, 384, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 2, 2, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 
3, 2, 2, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 2, 2, 384, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 2, 3, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 2, 3, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 2, 3, 384, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 1, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 1, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 1, 384, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 2, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 2, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 2, 384, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 3, 128, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 3, 256, 195, 132, 69,,,, +6, 7, 6, 3, 3, 3, 3, 3, 384, 195, 132, 69,,,, +6, 7, 6, 4, 1, 1, 1, 1, 128, 240, 174, 90,,,, +6, 7, 6, 4, 1, 1, 1, 1, 256, 240, 174, 90,,,, +6, 7, 6, 4, 1, 1, 1, 1, 384, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 1, 1, 128, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 1, 1, 256, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 1, 1, 384, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 1, 2, 128, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 1, 2, 256, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 1, 2, 384, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 2, 1, 128, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 2, 1, 256, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 2, 1, 384, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 2, 2, 128, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 2, 2, 256, 240, 174, 90,,,, +6, 7, 6, 4, 2, 2, 2, 2, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 1, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 1, 256, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 1, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 2, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 2, 256, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 2, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 3, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 3, 256, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 1, 3, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 2, 1, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 2, 1, 256, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 2, 1, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 2, 2, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 2, 2, 256, 240, 174, 
90,,,, +6, 7, 6, 4, 3, 3, 2, 2, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 2, 3, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 2, 3, 256, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 2, 3, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 1, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 1, 256, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 1, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 2, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 2, 256, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 2, 384, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 3, 128, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 3, 256, 240, 174, 90,,,, +6, 7, 6, 4, 3, 3, 3, 3, 384, 240, 174, 90,,,, +6, 7, 6, 5, 1, 1, 1, 1, 128, 240, 216, 111,,,, +6, 7, 6, 5, 1, 1, 1, 1, 256, 240, 216, 111,,,, +6, 7, 6, 5, 1, 1, 1, 1, 384, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 1, 1, 128, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 1, 1, 256, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 1, 1, 384, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 1, 2, 128, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 1, 2, 256, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 1, 2, 384, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 2, 1, 128, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 2, 1, 256, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 2, 1, 384, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 2, 2, 128, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 2, 2, 256, 240, 216, 111,,,, +6, 7, 6, 5, 2, 2, 2, 2, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 1, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 1, 256, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 1, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 2, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 2, 256, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 2, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 3, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 3, 256, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 1, 3, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 1, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 1, 256, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 1, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 2, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 2, 256, 240, 
216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 2, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 3, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 3, 256, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 2, 3, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 1, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 1, 256, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 1, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 2, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 2, 256, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 2, 384, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 3, 128, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 3, 256, 240, 216, 111,,,, +6, 7, 6, 5, 3, 3, 3, 3, 384, 240, 216, 111,,,, +6, 7, 6, 6, 1, 1, 1, 1, 128, 240, 240, 132,,,, +6, 7, 6, 6, 1, 1, 1, 1, 256, 240, 240, 132,,,, +6, 7, 6, 6, 1, 1, 1, 1, 384, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 1, 1, 128, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 1, 1, 256, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 1, 1, 384, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 1, 2, 128, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 1, 2, 256, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 1, 2, 384, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 2, 1, 128, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 2, 1, 256, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 2, 1, 384, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 2, 2, 128, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 2, 2, 256, 240, 240, 132,,,, +6, 7, 6, 6, 2, 2, 2, 2, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 1, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 1, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 1, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 2, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 2, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 2, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 3, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 3, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 1, 3, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 2, 1, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 2, 1, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 2, 1, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 2, 2, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 
2, 2, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 2, 2, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 2, 3, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 2, 3, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 2, 3, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 1, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 1, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 1, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 2, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 2, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 2, 384, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 3, 128, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 3, 256, 240, 240, 132,,,, +6, 7, 6, 6, 3, 3, 3, 3, 384, 240, 240, 132,,,, +6, 7, 6, 7, 1, 1, 1, 1, 128, 240, 240, 153,,,, +6, 7, 6, 7, 1, 1, 1, 1, 256, 240, 240, 153,,,, +6, 7, 6, 7, 1, 1, 1, 1, 384, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 1, 1, 128, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 1, 1, 256, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 1, 1, 384, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 1, 2, 128, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 1, 2, 256, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 1, 2, 384, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 2, 1, 128, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 2, 1, 256, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 2, 1, 384, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 2, 2, 128, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 2, 2, 256, 240, 240, 153,,,, +6, 7, 6, 7, 2, 2, 2, 2, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 1, 128, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 1, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 1, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 2, 128, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 2, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 2, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 3, 128, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 3, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 1, 3, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 2, 1, 128, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 2, 1, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 2, 1, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 2, 2, 128, 240, 240, 153,,,, 
+6, 7, 6, 7, 3, 3, 2, 2, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 2, 2, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 2, 3, 128, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 2, 3, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 2, 3, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 1, 128, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 1, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 1, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 2, 128, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 2, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 2, 384, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 3, 128, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 3, 256, 240, 240, 153,,,, +6, 7, 6, 7, 3, 3, 3, 3, 384, 240, 240, 153,,,, +7, 1, 1, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 36,, +7, 1, 1, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 36,, +7, 1, 1, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 36,, +7, 1, 2, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 36,, +7, 1, 2, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 36,, +7, 1, 2, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 36,, +7, 1, 3, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 36,, +7, 1, 3, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 36,, +7, 1, 3, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 36,, +7, 1, 4, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 36,, +7, 1, 4, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 36,, +7, 1, 4, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 36,, +7, 1, 5, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 36,, +7, 1, 5, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 36,, +7, 1, 5, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 36,, +7, 1, 6, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 36,, +7, 1, 6, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 36,, +7, 1, 6, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 36,, +7, 1, 7, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 36,, +7, 1, 7, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 36,, +7, 1, 7, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 36,, +7, 2, 1, 1, 1, 1, 1, 1, 128, 55, 55, 55, 37,,, +7, 2, 1, 1, 1, 1, 1, 1, 256, 55, 55, 55, 37,,, +7, 2, 1, 1, 1, 1, 1, 1, 384, 55, 55, 55, 37,,, +7, 2, 1, 1, 2, 2, 1, 1, 128, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 1, 1, 256, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 
2, 1, 1, 384, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 1, 2, 128, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 1, 2, 256, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 1, 2, 384, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 2, 1, 128, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 2, 1, 256, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 2, 1, 384, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 2, 2, 128, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 2, 2, 256, 55, 55, 53, 37,,, +7, 2, 1, 1, 2, 2, 2, 2, 384, 55, 55, 53, 37,,, +7, 2, 1, 2, 1, 1, 1, 1, 128, 109, 109, 109, 73,,, +7, 2, 1, 2, 1, 1, 1, 1, 256, 109, 109, 109, 73,,, +7, 2, 1, 2, 1, 1, 1, 1, 384, 109, 109, 109, 73,,, +7, 2, 1, 2, 2, 2, 1, 1, 128, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 1, 1, 256, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 1, 1, 384, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 1, 2, 128, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 1, 2, 256, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 1, 2, 384, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 2, 1, 128, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 2, 1, 256, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 2, 1, 384, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 2, 2, 128, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 2, 2, 256, 109, 109, 105, 73,,, +7, 2, 1, 2, 2, 2, 2, 2, 384, 109, 109, 105, 73,,, +7, 2, 2, 1, 1, 1, 1, 1, 128, 55, 55, 55, 37,,, +7, 2, 2, 1, 1, 1, 1, 1, 256, 55, 55, 55, 37,,, +7, 2, 2, 1, 1, 1, 1, 1, 384, 55, 55, 55, 37,,, +7, 2, 2, 1, 2, 2, 1, 1, 128, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 1, 1, 256, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 1, 1, 384, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 1, 2, 128, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 1, 2, 256, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 1, 2, 384, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 2, 1, 128, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 2, 1, 256, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 2, 1, 384, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 2, 2, 128, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 2, 2, 256, 55, 55, 53, 37,,, +7, 2, 2, 1, 2, 2, 2, 2, 384, 55, 55, 53, 37,,, +7, 2, 2, 2, 1, 1, 1, 1, 128, 109, 109, 109, 73,,, 
+7, 2, 2, 2, 1, 1, 1, 1, 256, 109, 109, 109, 73,,, +7, 2, 2, 2, 1, 1, 1, 1, 384, 109, 109, 109, 73,,, +7, 2, 2, 2, 2, 2, 1, 1, 128, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 1, 1, 256, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 1, 1, 384, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 1, 2, 128, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 1, 2, 256, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 1, 2, 384, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 2, 1, 128, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 2, 1, 256, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 2, 1, 384, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 2, 2, 128, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 2, 2, 256, 109, 109, 105, 73,,, +7, 2, 2, 2, 2, 2, 2, 2, 384, 109, 109, 105, 73,,, +7, 2, 3, 1, 1, 1, 1, 1, 128, 55, 55, 55, 37,,, +7, 2, 3, 1, 1, 1, 1, 1, 256, 55, 55, 55, 37,,, +7, 2, 3, 1, 1, 1, 1, 1, 384, 55, 55, 55, 37,,, +7, 2, 3, 1, 2, 2, 1, 1, 128, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 1, 1, 256, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 1, 1, 384, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 1, 2, 128, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 1, 2, 256, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 1, 2, 384, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 2, 1, 128, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 2, 1, 256, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 2, 1, 384, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 2, 2, 128, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 2, 2, 256, 55, 55, 53, 37,,, +7, 2, 3, 1, 2, 2, 2, 2, 384, 55, 55, 53, 37,,, +7, 2, 3, 2, 1, 1, 1, 1, 128, 109, 109, 109, 73,,, +7, 2, 3, 2, 1, 1, 1, 1, 256, 109, 109, 109, 73,,, +7, 2, 3, 2, 1, 1, 1, 1, 384, 109, 109, 109, 73,,, +7, 2, 3, 2, 2, 2, 1, 1, 128, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 1, 1, 256, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 1, 1, 384, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 1, 2, 128, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 1, 2, 256, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 1, 2, 384, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 2, 1, 128, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 2, 1, 256, 109, 109, 105, 73,,, +7, 
2, 3, 2, 2, 2, 2, 1, 384, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 2, 2, 128, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 2, 2, 256, 109, 109, 105, 73,,, +7, 2, 3, 2, 2, 2, 2, 2, 384, 109, 109, 105, 73,,, +7, 2, 4, 1, 1, 1, 1, 1, 128, 55, 55, 55, 37,,, +7, 2, 4, 1, 1, 1, 1, 1, 256, 55, 55, 55, 37,,, +7, 2, 4, 1, 1, 1, 1, 1, 384, 55, 55, 55, 37,,, +7, 2, 4, 1, 2, 2, 1, 1, 128, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 1, 1, 256, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 1, 1, 384, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 1, 2, 128, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 1, 2, 256, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 1, 2, 384, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 2, 1, 128, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 2, 1, 256, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 2, 1, 384, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 2, 2, 128, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 2, 2, 256, 55, 55, 53, 37,,, +7, 2, 4, 1, 2, 2, 2, 2, 384, 55, 55, 53, 37,,, +7, 2, 4, 2, 1, 1, 1, 1, 128, 109, 109, 109, 73,,, +7, 2, 4, 2, 1, 1, 1, 1, 256, 109, 109, 109, 73,,, +7, 2, 4, 2, 1, 1, 1, 1, 384, 109, 109, 109, 73,,, +7, 2, 4, 2, 2, 2, 1, 1, 128, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 1, 1, 256, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 1, 1, 384, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 1, 2, 128, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 1, 2, 256, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 1, 2, 384, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 2, 1, 128, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 2, 1, 256, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 2, 1, 384, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 2, 2, 128, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 2, 2, 256, 109, 109, 105, 73,,, +7, 2, 4, 2, 2, 2, 2, 2, 384, 109, 109, 105, 73,,, +7, 2, 5, 1, 1, 1, 1, 1, 128, 55, 55, 55, 37,,, +7, 2, 5, 1, 1, 1, 1, 1, 256, 55, 55, 55, 37,,, +7, 2, 5, 1, 1, 1, 1, 1, 384, 55, 55, 55, 37,,, +7, 2, 5, 1, 2, 2, 1, 1, 128, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 1, 1, 256, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 1, 1, 384, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 1, 2, 
128, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 1, 2, 256, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 1, 2, 384, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 2, 1, 128, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 2, 1, 256, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 2, 1, 384, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 2, 2, 128, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 2, 2, 256, 55, 55, 53, 37,,, +7, 2, 5, 1, 2, 2, 2, 2, 384, 55, 55, 53, 37,,, +7, 2, 5, 2, 1, 1, 1, 1, 128, 109, 109, 109, 73,,, +7, 2, 5, 2, 1, 1, 1, 1, 256, 109, 109, 109, 73,,, +7, 2, 5, 2, 1, 1, 1, 1, 384, 109, 109, 109, 73,,, +7, 2, 5, 2, 2, 2, 1, 1, 128, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 1, 1, 256, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 1, 1, 384, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 1, 2, 128, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 1, 2, 256, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 1, 2, 384, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 2, 1, 128, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 2, 1, 256, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 2, 1, 384, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 2, 2, 128, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 2, 2, 256, 109, 109, 105, 73,,, +7, 2, 5, 2, 2, 2, 2, 2, 384, 109, 109, 105, 73,,, +7, 2, 6, 1, 1, 1, 1, 1, 128, 55, 55, 55, 37,,, +7, 2, 6, 1, 1, 1, 1, 1, 256, 55, 55, 55, 37,,, +7, 2, 6, 1, 1, 1, 1, 1, 384, 55, 55, 55, 37,,, +7, 2, 6, 1, 2, 2, 1, 1, 128, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 1, 1, 256, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 1, 1, 384, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 1, 2, 128, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 1, 2, 256, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 1, 2, 384, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 2, 1, 128, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 2, 1, 256, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 2, 1, 384, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 2, 2, 128, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 2, 2, 256, 55, 55, 53, 37,,, +7, 2, 6, 1, 2, 2, 2, 2, 384, 55, 55, 53, 37,,, +7, 2, 6, 2, 1, 1, 1, 1, 128, 109, 109, 109, 73,,, +7, 2, 6, 2, 1, 1, 1, 1, 256, 109, 109, 109, 73,,, +7, 
2, 6, 2, 1, 1, 1, 1, 384, 109, 109, 109, 73,,, +7, 2, 6, 2, 2, 2, 1, 1, 128, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 1, 1, 256, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 1, 1, 384, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 1, 2, 128, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 1, 2, 256, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 1, 2, 384, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 2, 1, 128, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 2, 1, 256, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 2, 1, 384, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 2, 2, 128, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 2, 2, 256, 109, 109, 105, 73,,, +7, 2, 6, 2, 2, 2, 2, 2, 384, 109, 109, 105, 73,,, +7, 2, 7, 1, 1, 1, 1, 1, 128, 55, 55, 55, 37,,, +7, 2, 7, 1, 1, 1, 1, 1, 256, 55, 55, 55, 37,,, +7, 2, 7, 1, 1, 1, 1, 1, 384, 55, 55, 55, 37,,, +7, 2, 7, 1, 2, 2, 1, 1, 128, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 1, 1, 256, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 1, 1, 384, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 1, 2, 128, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 1, 2, 256, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 1, 2, 384, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 2, 1, 128, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 2, 1, 256, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 2, 1, 384, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 2, 2, 128, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 2, 2, 256, 55, 55, 53, 37,,, +7, 2, 7, 1, 2, 2, 2, 2, 384, 55, 55, 53, 37,,, +7, 2, 7, 2, 1, 1, 1, 1, 128, 109, 109, 109, 73,,, +7, 2, 7, 2, 1, 1, 1, 1, 256, 109, 109, 109, 73,,, +7, 2, 7, 2, 1, 1, 1, 1, 384, 109, 109, 109, 73,,, +7, 2, 7, 2, 2, 2, 1, 1, 128, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 1, 1, 256, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 1, 1, 384, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 1, 2, 128, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 1, 2, 256, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 1, 2, 384, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 2, 1, 128, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 2, 1, 256, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 2, 1, 384, 109, 109, 105, 73,,, +7, 2, 
7, 2, 2, 2, 2, 2, 128, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 2, 2, 256, 109, 109, 105, 73,,, +7, 2, 7, 2, 2, 2, 2, 2, 384, 109, 109, 105, 73,,, +7, 3, 1, 1, 1, 1, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 1, 1, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 1, 1, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 2, 2, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 3, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 3, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 1, 3, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 3, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 3, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 2, 3, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 1, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 1, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 1, 384, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 2, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 2, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 2, 384, 
56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 3, 128, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 3, 256, 56, 56, 38, 20,,, +7, 3, 1, 1, 3, 3, 3, 3, 384, 56, 56, 38, 20,,, +7, 3, 1, 2, 1, 1, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 1, 1, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 1, 1, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 2, 2, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 3, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 3, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 1, 3, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 3, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 3, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 2, 3, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 1, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 1, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 1, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 2, 128, 110, 
110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 2, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 2, 384, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 3, 128, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 3, 256, 110, 110, 74, 38,,, +7, 3, 1, 2, 3, 3, 3, 3, 384, 110, 110, 74, 38,,, +7, 3, 1, 3, 1, 1, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 1, 1, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 1, 1, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 2, 2, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 3, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 3, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 1, 3, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 3, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 3, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 2, 3, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 3, 1, 128, 164, 164, 110, 56,,, 
+7, 3, 1, 3, 3, 3, 3, 1, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 3, 1, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 3, 2, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 3, 2, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 3, 2, 384, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 3, 3, 128, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 3, 3, 256, 164, 164, 110, 56,,, +7, 3, 1, 3, 3, 3, 3, 3, 384, 164, 164, 110, 56,,, +7, 3, 2, 1, 1, 1, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 1, 1, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 1, 1, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 2, 2, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 3, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 3, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 1, 3, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 3, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 3, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 2, 3, 384, 56, 56, 38, 20,,, +7, 3, 
2, 1, 3, 3, 3, 1, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 3, 1, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 3, 1, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 3, 2, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 3, 2, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 3, 2, 384, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 3, 3, 128, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 3, 3, 256, 56, 56, 38, 20,,, +7, 3, 2, 1, 3, 3, 3, 3, 384, 56, 56, 38, 20,,, +7, 3, 2, 2, 1, 1, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 1, 1, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 1, 1, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 2, 2, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 3, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 3, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 1, 3, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 3, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 3, 
256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 2, 3, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 1, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 1, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 1, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 2, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 2, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 2, 384, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 3, 128, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 3, 256, 110, 110, 74, 38,,, +7, 3, 2, 2, 3, 3, 3, 3, 384, 110, 110, 74, 38,,, +7, 3, 2, 3, 1, 1, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 1, 1, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 1, 1, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 2, 2, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 3, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 3, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 1, 3, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 2, 2, 256, 164, 164, 110, 
56,,, +7, 3, 2, 3, 3, 3, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 2, 3, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 2, 3, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 2, 3, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 1, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 1, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 1, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 2, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 2, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 2, 384, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 3, 128, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 3, 256, 164, 164, 110, 56,,, +7, 3, 2, 3, 3, 3, 3, 3, 384, 164, 164, 110, 56,,, +7, 3, 3, 1, 1, 1, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 1, 1, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 1, 1, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 2, 2, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 3, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 3, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 1, 3, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 2, 128, 56, 
56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 3, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 3, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 2, 3, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 1, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 1, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 1, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 2, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 2, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 2, 384, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 3, 128, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 3, 256, 56, 56, 38, 20,,, +7, 3, 3, 1, 3, 3, 3, 3, 384, 56, 56, 38, 20,,, +7, 3, 3, 2, 1, 1, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 1, 1, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 1, 1, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 2, 2, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 3, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 3, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 1, 3, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 
3, 3, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 2, 3, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 2, 3, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 2, 3, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 1, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 1, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 1, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 2, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 2, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 2, 384, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 3, 128, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 3, 256, 110, 110, 74, 38,,, +7, 3, 3, 2, 3, 3, 3, 3, 384, 110, 110, 74, 38,,, +7, 3, 3, 3, 1, 1, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 1, 1, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 1, 1, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 2, 2, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 3, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 3, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 1, 3, 384, 164, 164, 
110, 56,,, +7, 3, 3, 3, 3, 3, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 2, 3, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 2, 3, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 2, 3, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 1, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 1, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 1, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 2, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 2, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 2, 384, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 3, 128, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 3, 256, 164, 164, 110, 56,,, +7, 3, 3, 3, 3, 3, 3, 3, 384, 164, 164, 110, 56,,, +7, 3, 4, 1, 1, 1, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 1, 1, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 1, 1, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 2, 2, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 1, 3, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 
3, 3, 1, 3, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 1, 3, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 3, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 3, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 2, 3, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 1, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 1, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 1, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 2, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 2, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 2, 384, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 3, 128, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 3, 256, 56, 56, 38, 20,,, +7, 3, 4, 1, 3, 3, 3, 3, 384, 56, 56, 38, 20,,, +7, 3, 4, 2, 1, 1, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 1, 1, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 1, 1, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 2, 2, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 1, 2, 384, 110, 110, 74, 38,,, 
+7, 3, 4, 2, 3, 3, 1, 3, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 1, 3, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 1, 3, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 3, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 3, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 2, 3, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 1, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 1, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 1, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 2, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 2, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 2, 384, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 3, 128, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 3, 256, 110, 110, 74, 38,,, +7, 3, 4, 2, 3, 3, 3, 3, 384, 110, 110, 74, 38,,, +7, 3, 4, 3, 1, 1, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 1, 1, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 1, 1, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 2, 2, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 2, 128, 
164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 3, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 3, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 1, 3, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 3, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 3, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 2, 3, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 1, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 1, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 1, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 2, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 2, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 2, 384, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 3, 128, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 3, 256, 164, 164, 110, 56,,, +7, 3, 4, 3, 3, 3, 3, 3, 384, 164, 164, 110, 56,,, +7, 3, 5, 1, 1, 1, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 1, 1, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 1, 1, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 2, 2, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 1, 1, 256, 56, 56, 
38, 20,,, +7, 3, 5, 1, 3, 3, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 1, 3, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 1, 3, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 1, 3, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 3, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 3, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 2, 3, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 1, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 1, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 1, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 2, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 2, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 2, 384, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 3, 128, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 3, 256, 56, 56, 38, 20,,, +7, 3, 5, 1, 3, 3, 3, 3, 384, 56, 56, 38, 20,,, +7, 3, 5, 2, 1, 1, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 1, 1, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 1, 1, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 2, 2, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 1, 1, 128, 110, 110, 
74, 38,,, +7, 3, 5, 2, 3, 3, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 1, 3, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 1, 3, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 1, 3, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 3, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 3, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 2, 3, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 1, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 1, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 1, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 2, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 2, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 2, 384, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 3, 128, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 3, 256, 110, 110, 74, 38,,, +7, 3, 5, 2, 3, 3, 3, 3, 384, 110, 110, 74, 38,,, +7, 3, 5, 3, 1, 1, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 1, 1, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 1, 1, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 2, 2, 
256, 164, 164, 110, 56,,, +7, 3, 5, 3, 2, 2, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 3, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 3, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 1, 3, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 3, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 3, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 2, 3, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 1, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 1, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 1, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 2, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 2, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 2, 384, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 3, 128, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 3, 256, 164, 164, 110, 56,,, +7, 3, 5, 3, 3, 3, 3, 3, 384, 164, 164, 110, 56,,, +7, 3, 6, 1, 1, 1, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 1, 1, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 1, 1, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 
2, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 2, 2, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 3, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 3, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 1, 3, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 3, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 3, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 2, 3, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 1, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 1, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 1, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 2, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 2, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 2, 384, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 3, 128, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 3, 256, 56, 56, 38, 20,,, +7, 3, 6, 1, 3, 3, 3, 3, 384, 56, 56, 38, 20,,, +7, 3, 6, 2, 1, 1, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 1, 1, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 1, 1, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 2, 1, 
256, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 2, 2, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 3, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 3, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 1, 3, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 3, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 3, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 2, 3, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 1, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 1, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 1, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 2, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 2, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 2, 384, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 3, 128, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 3, 256, 110, 110, 74, 38,,, +7, 3, 6, 2, 3, 3, 3, 3, 384, 110, 110, 74, 38,,, +7, 3, 6, 3, 1, 1, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 1, 1, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 1, 1, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 
2, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 2, 2, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 3, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 3, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 1, 3, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 3, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 3, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 2, 3, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 1, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 1, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 1, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 2, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 2, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 2, 384, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 3, 128, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 3, 256, 164, 164, 110, 56,,, +7, 3, 6, 3, 3, 3, 3, 3, 384, 164, 164, 110, 56,,, +7, 3, 7, 1, 1, 1, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 1, 1, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 1, 1, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 1, 1, 384, 56, 56, 38, 
20,,, +7, 3, 7, 1, 2, 2, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 2, 2, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 1, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 1, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 1, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 2, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 2, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 2, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 3, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 3, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 1, 3, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 1, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 1, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 1, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 2, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 2, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 2, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 3, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 3, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 2, 3, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 1, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 1, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 1, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 2, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 2, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 2, 384, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 3, 128, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 3, 256, 56, 56, 38, 20,,, +7, 3, 7, 1, 3, 3, 3, 3, 384, 56, 56, 38, 20,,, +7, 3, 7, 2, 1, 1, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 1, 1, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 1, 1, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 
2, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 2, 2, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 1, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 1, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 1, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 2, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 2, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 2, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 3, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 3, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 1, 3, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 1, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 1, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 1, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 2, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 2, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 2, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 3, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 3, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 2, 3, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 1, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 1, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 1, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 2, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 2, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 2, 384, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 3, 128, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 3, 256, 110, 110, 74, 38,,, +7, 3, 7, 2, 3, 3, 3, 3, 384, 110, 110, 74, 38,,, +7, 3, 7, 3, 1, 1, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 1, 1, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 1, 1, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 
2, 2, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 2, 2, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 1, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 1, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 1, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 2, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 2, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 2, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 3, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 3, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 1, 3, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 1, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 1, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 1, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 2, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 2, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 2, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 3, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 3, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 2, 3, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 1, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 1, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 1, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 2, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 2, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 2, 384, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 3, 128, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 3, 256, 164, 164, 110, 56,,, +7, 3, 7, 3, 3, 3, 3, 3, 384, 164, 164, 110, 56,,, +7, 4, 1, 1, 1, 1, 1, 
1, 128, 57, 57, 39,,,, +7, 4, 1, 1, 1, 1, 1, 1, 256, 57, 57, 39,,,, +7, 4, 1, 1, 1, 1, 1, 1, 384, 57, 57, 39,,,, +7, 4, 1, 1, 2, 2, 1, 1, 128, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 1, 1, 256, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 1, 1, 384, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 1, 2, 128, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 1, 2, 256, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 1, 2, 384, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 2, 1, 128, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 2, 1, 256, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 2, 1, 384, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 2, 2, 128, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 2, 2, 256, 57, 55, 39,,,, +7, 4, 1, 1, 2, 2, 2, 2, 384, 57, 55, 39,,,, +7, 4, 1, 1, 3, 3, 1, 1, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 1, 1, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 1, 1, 384, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 1, 2, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 1, 2, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 1, 2, 384, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 1, 3, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 1, 3, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 1, 3, 384, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 1, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 1, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 1, 384, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 2, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 2, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 2, 384, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 3, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 3, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 2, 3, 384, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 1, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 1, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 1, 384, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 2, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 2, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 2, 384, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 3, 128, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 3, 256, 57, 54, 39,,,, +7, 4, 1, 1, 3, 3, 3, 3, 384, 57, 54, 39,,,, +7, 4, 1, 2, 1, 1, 1, 1, 128, 111, 111, 75,,,, +7, 4, 1, 2, 1, 1, 1, 1, 256, 111, 111, 75,,,, +7, 4, 1, 2, 1, 1, 1, 1, 384, 111, 
111, 75,,,, +7, 4, 1, 2, 2, 2, 1, 1, 128, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 1, 1, 256, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 1, 1, 384, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 1, 2, 128, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 1, 2, 256, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 1, 2, 384, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 2, 1, 128, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 2, 1, 256, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 2, 1, 384, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 2, 2, 128, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 2, 2, 256, 111, 107, 75,,,, +7, 4, 1, 2, 2, 2, 2, 2, 384, 111, 107, 75,,,, +7, 4, 1, 2, 3, 3, 1, 1, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 1, 1, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 1, 1, 384, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 1, 2, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 1, 2, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 1, 2, 384, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 1, 3, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 1, 3, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 1, 3, 384, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 1, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 1, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 1, 384, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 2, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 2, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 2, 384, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 3, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 3, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 2, 3, 384, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 1, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 1, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 1, 384, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 2, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 2, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 2, 384, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 3, 128, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 3, 256, 111, 105, 75,,,, +7, 4, 1, 2, 3, 3, 3, 3, 384, 111, 105, 75,,,, +7, 4, 1, 3, 1, 1, 1, 1, 128, 165, 165, 111,,,, +7, 4, 1, 3, 1, 1, 1, 1, 256, 165, 165, 111,,,, +7, 4, 1, 3, 1, 1, 1, 1, 384, 165, 165, 111,,,, +7, 4, 1, 
3, 2, 2, 1, 1, 128, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 1, 1, 256, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 1, 1, 384, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 1, 2, 128, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 1, 2, 256, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 1, 2, 384, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 2, 1, 128, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 2, 1, 256, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 2, 1, 384, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 2, 2, 128, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 2, 2, 256, 165, 159, 111,,,, +7, 4, 1, 3, 2, 2, 2, 2, 384, 165, 159, 111,,,, +7, 4, 1, 3, 3, 3, 1, 1, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 1, 1, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 1, 1, 384, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 1, 2, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 1, 2, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 1, 2, 384, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 1, 3, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 1, 3, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 1, 3, 384, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 1, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 1, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 1, 384, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 2, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 2, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 2, 384, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 3, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 3, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 2, 3, 384, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 1, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 1, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 1, 384, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 2, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 2, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 2, 384, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 3, 128, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 3, 256, 165, 156, 111,,,, +7, 4, 1, 3, 3, 3, 3, 3, 384, 165, 156, 111,,,, +7, 4, 1, 4, 1, 1, 1, 1, 128, 219, 219, 147,,,, +7, 4, 1, 4, 1, 1, 1, 1, 256, 219, 219, 147,,,, +7, 4, 1, 4, 1, 1, 1, 1, 384, 219, 219, 
147,,,, +7, 4, 1, 4, 2, 2, 1, 1, 128, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 1, 1, 256, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 1, 1, 384, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 1, 2, 128, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 1, 2, 256, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 1, 2, 384, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 2, 1, 128, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 2, 1, 256, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 2, 1, 384, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 2, 2, 128, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 2, 2, 256, 219, 211, 147,,,, +7, 4, 1, 4, 2, 2, 2, 2, 384, 219, 211, 147,,,, +7, 4, 1, 4, 3, 3, 1, 1, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 1, 1, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 1, 1, 384, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 1, 2, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 1, 2, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 1, 2, 384, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 1, 3, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 1, 3, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 1, 3, 384, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 1, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 1, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 1, 384, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 2, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 2, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 2, 384, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 3, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 3, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 2, 3, 384, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 1, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 1, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 1, 384, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 2, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 2, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 2, 384, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 3, 128, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 3, 256, 219, 207, 147,,,, +7, 4, 1, 4, 3, 3, 3, 3, 384, 219, 207, 147,,,, +7, 4, 2, 1, 1, 1, 1, 1, 128, 57, 57, 39,,,, +7, 4, 2, 1, 1, 1, 1, 1, 256, 57, 57, 39,,,, +7, 4, 2, 1, 1, 1, 1, 1, 384, 
57, 57, 39,,,, +7, 4, 2, 1, 2, 2, 1, 1, 128, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 1, 1, 256, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 1, 1, 384, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 1, 2, 128, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 1, 2, 256, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 1, 2, 384, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 2, 1, 128, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 2, 1, 256, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 2, 1, 384, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 2, 2, 128, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 2, 2, 256, 57, 55, 39,,,, +7, 4, 2, 1, 2, 2, 2, 2, 384, 57, 55, 39,,,, +7, 4, 2, 1, 3, 3, 1, 1, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 1, 1, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 1, 1, 384, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 1, 2, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 1, 2, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 1, 2, 384, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 1, 3, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 1, 3, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 1, 3, 384, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 1, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 1, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 1, 384, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 2, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 2, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 2, 384, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 3, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 3, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 2, 3, 384, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 1, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 1, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 1, 384, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 2, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 2, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 2, 384, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 3, 128, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 3, 256, 57, 54, 39,,,, +7, 4, 2, 1, 3, 3, 3, 3, 384, 57, 54, 39,,,, +7, 4, 2, 2, 1, 1, 1, 1, 128, 111, 111, 75,,,, +7, 4, 2, 2, 1, 1, 1, 1, 256, 111, 111, 75,,,, +7, 4, 2, 2, 1, 1, 1, 1, 384, 111, 111, 75,,,, +7, 4, 2, 2, 2, 2, 1, 1, 128, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 1, 1, 256, 111, 107, 
75,,,, +7, 4, 2, 2, 2, 2, 1, 1, 384, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 1, 2, 128, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 1, 2, 256, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 1, 2, 384, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 2, 1, 128, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 2, 1, 256, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 2, 1, 384, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 2, 2, 128, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 2, 2, 256, 111, 107, 75,,,, +7, 4, 2, 2, 2, 2, 2, 2, 384, 111, 107, 75,,,, +7, 4, 2, 2, 3, 3, 1, 1, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 1, 1, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 1, 1, 384, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 1, 2, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 1, 2, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 1, 2, 384, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 1, 3, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 1, 3, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 1, 3, 384, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 1, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 1, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 1, 384, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 2, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 2, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 2, 384, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 3, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 3, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 2, 3, 384, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 1, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 1, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 1, 384, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 2, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 2, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 2, 384, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 3, 128, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 3, 256, 111, 105, 75,,,, +7, 4, 2, 2, 3, 3, 3, 3, 384, 111, 105, 75,,,, +7, 4, 2, 3, 1, 1, 1, 1, 128, 165, 165, 111,,,, +7, 4, 2, 3, 1, 1, 1, 1, 256, 165, 165, 111,,,, +7, 4, 2, 3, 1, 1, 1, 1, 384, 165, 165, 111,,,, +7, 4, 2, 3, 2, 2, 1, 1, 128, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 1, 1, 256, 165, 159, 111,,,, +7, 4, 2, 3, 
2, 2, 1, 1, 384, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 1, 2, 128, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 1, 2, 256, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 1, 2, 384, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 2, 1, 128, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 2, 1, 256, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 2, 1, 384, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 2, 2, 128, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 2, 2, 256, 165, 159, 111,,,, +7, 4, 2, 3, 2, 2, 2, 2, 384, 165, 159, 111,,,, +7, 4, 2, 3, 3, 3, 1, 1, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 1, 1, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 1, 1, 384, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 1, 2, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 1, 2, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 1, 2, 384, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 1, 3, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 1, 3, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 1, 3, 384, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 1, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 1, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 1, 384, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 2, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 2, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 2, 384, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 3, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 3, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 2, 3, 384, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 1, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 1, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 1, 384, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 2, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 2, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 2, 384, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 3, 128, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 3, 256, 165, 156, 111,,,, +7, 4, 2, 3, 3, 3, 3, 3, 384, 165, 156, 111,,,, +7, 4, 2, 4, 1, 1, 1, 1, 128, 219, 219, 147,,,, +7, 4, 2, 4, 1, 1, 1, 1, 256, 219, 219, 147,,,, +7, 4, 2, 4, 1, 1, 1, 1, 384, 219, 219, 147,,,, +7, 4, 2, 4, 2, 2, 1, 1, 128, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 1, 1, 256, 219, 211, 
147,,,, +7, 4, 2, 4, 2, 2, 1, 1, 384, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 1, 2, 128, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 1, 2, 256, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 1, 2, 384, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 2, 1, 128, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 2, 1, 256, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 2, 1, 384, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 2, 2, 128, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 2, 2, 256, 219, 211, 147,,,, +7, 4, 2, 4, 2, 2, 2, 2, 384, 219, 211, 147,,,, +7, 4, 2, 4, 3, 3, 1, 1, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 1, 1, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 1, 1, 384, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 1, 2, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 1, 2, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 1, 2, 384, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 1, 3, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 1, 3, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 1, 3, 384, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 1, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 1, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 1, 384, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 2, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 2, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 2, 384, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 3, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 3, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 2, 3, 384, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 1, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 1, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 1, 384, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 2, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 2, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 2, 384, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 3, 128, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 3, 256, 219, 207, 147,,,, +7, 4, 2, 4, 3, 3, 3, 3, 384, 219, 207, 147,,,, +7, 4, 3, 1, 1, 1, 1, 1, 128, 57, 57, 39,,,, +7, 4, 3, 1, 1, 1, 1, 1, 256, 57, 57, 39,,,, +7, 4, 3, 1, 1, 1, 1, 1, 384, 57, 57, 39,,,, +7, 4, 3, 1, 2, 2, 1, 1, 128, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 1, 1, 256, 57, 
55, 39,,,, +7, 4, 3, 1, 2, 2, 1, 1, 384, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 1, 2, 128, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 1, 2, 256, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 1, 2, 384, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 2, 1, 128, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 2, 1, 256, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 2, 1, 384, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 2, 2, 128, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 2, 2, 256, 57, 55, 39,,,, +7, 4, 3, 1, 2, 2, 2, 2, 384, 57, 55, 39,,,, +7, 4, 3, 1, 3, 3, 1, 1, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 1, 1, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 1, 1, 384, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 1, 2, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 1, 2, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 1, 2, 384, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 1, 3, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 1, 3, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 1, 3, 384, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 1, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 1, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 1, 384, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 2, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 2, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 2, 384, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 3, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 3, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 2, 3, 384, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 1, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 1, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 1, 384, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 2, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 2, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 2, 384, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 3, 128, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 3, 256, 57, 54, 39,,,, +7, 4, 3, 1, 3, 3, 3, 3, 384, 57, 54, 39,,,, +7, 4, 3, 2, 1, 1, 1, 1, 128, 111, 111, 75,,,, +7, 4, 3, 2, 1, 1, 1, 1, 256, 111, 111, 75,,,, +7, 4, 3, 2, 1, 1, 1, 1, 384, 111, 111, 75,,,, +7, 4, 3, 2, 2, 2, 1, 1, 128, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 1, 1, 256, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 1, 1, 384, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 1, 2, 128, 111, 107, 
75,,,, +7, 4, 3, 2, 2, 2, 1, 2, 256, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 1, 2, 384, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 2, 1, 128, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 2, 1, 256, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 2, 1, 384, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 2, 2, 128, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 2, 2, 256, 111, 107, 75,,,, +7, 4, 3, 2, 2, 2, 2, 2, 384, 111, 107, 75,,,, +7, 4, 3, 2, 3, 3, 1, 1, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 1, 1, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 1, 1, 384, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 1, 2, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 1, 2, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 1, 2, 384, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 1, 3, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 1, 3, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 1, 3, 384, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 1, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 1, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 1, 384, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 2, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 2, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 2, 384, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 3, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 3, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 2, 3, 384, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 1, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 1, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 1, 384, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 2, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 2, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 2, 384, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 3, 128, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 3, 256, 111, 105, 75,,,, +7, 4, 3, 2, 3, 3, 3, 3, 384, 111, 105, 75,,,, +7, 4, 3, 3, 1, 1, 1, 1, 128, 165, 165, 111,,,, +7, 4, 3, 3, 1, 1, 1, 1, 256, 165, 165, 111,,,, +7, 4, 3, 3, 1, 1, 1, 1, 384, 165, 165, 111,,,, +7, 4, 3, 3, 2, 2, 1, 1, 128, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 1, 1, 256, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 1, 1, 384, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 1, 2, 128, 165, 159, 111,,,, +7, 4, 3, 
3, 2, 2, 1, 2, 256, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 1, 2, 384, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 2, 1, 128, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 2, 1, 256, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 2, 1, 384, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 2, 2, 128, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 2, 2, 256, 165, 159, 111,,,, +7, 4, 3, 3, 2, 2, 2, 2, 384, 165, 159, 111,,,, +7, 4, 3, 3, 3, 3, 1, 1, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 1, 1, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 1, 1, 384, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 1, 2, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 1, 2, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 1, 2, 384, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 1, 3, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 1, 3, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 1, 3, 384, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 1, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 1, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 1, 384, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 2, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 2, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 2, 384, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 3, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 3, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 2, 3, 384, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 1, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 1, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 1, 384, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 2, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 2, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 2, 384, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 3, 128, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 3, 256, 165, 156, 111,,,, +7, 4, 3, 3, 3, 3, 3, 3, 384, 165, 156, 111,,,, +7, 4, 3, 4, 1, 1, 1, 1, 128, 219, 219, 147,,,, +7, 4, 3, 4, 1, 1, 1, 1, 256, 219, 219, 147,,,, +7, 4, 3, 4, 1, 1, 1, 1, 384, 219, 219, 147,,,, +7, 4, 3, 4, 2, 2, 1, 1, 128, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 1, 1, 256, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 1, 1, 384, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 1, 2, 128, 219, 211, 
147,,,, +7, 4, 3, 4, 2, 2, 1, 2, 256, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 1, 2, 384, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 2, 1, 128, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 2, 1, 256, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 2, 1, 384, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 2, 2, 128, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 2, 2, 256, 219, 211, 147,,,, +7, 4, 3, 4, 2, 2, 2, 2, 384, 219, 211, 147,,,, +7, 4, 3, 4, 3, 3, 1, 1, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 1, 1, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 1, 1, 384, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 1, 2, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 1, 2, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 1, 2, 384, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 1, 3, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 1, 3, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 1, 3, 384, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 1, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 1, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 1, 384, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 2, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 2, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 2, 384, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 3, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 3, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 2, 3, 384, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 1, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 1, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 1, 384, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 2, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 2, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 2, 384, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 3, 128, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 3, 256, 219, 207, 147,,,, +7, 4, 3, 4, 3, 3, 3, 3, 384, 219, 207, 147,,,, +7, 4, 4, 1, 1, 1, 1, 1, 128, 57, 57, 39,,,, +7, 4, 4, 1, 1, 1, 1, 1, 256, 57, 57, 39,,,, +7, 4, 4, 1, 1, 1, 1, 1, 384, 57, 57, 39,,,, +7, 4, 4, 1, 2, 2, 1, 1, 128, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 1, 1, 256, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 1, 1, 384, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 1, 2, 128, 57, 55, 
39,,,, +7, 4, 4, 1, 2, 2, 1, 2, 256, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 1, 2, 384, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 2, 1, 128, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 2, 1, 256, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 2, 1, 384, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 2, 2, 128, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 2, 2, 256, 57, 55, 39,,,, +7, 4, 4, 1, 2, 2, 2, 2, 384, 57, 55, 39,,,, +7, 4, 4, 1, 3, 3, 1, 1, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 1, 1, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 1, 1, 384, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 1, 2, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 1, 2, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 1, 2, 384, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 1, 3, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 1, 3, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 1, 3, 384, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 1, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 1, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 1, 384, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 2, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 2, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 2, 384, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 3, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 3, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 2, 3, 384, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 1, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 1, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 1, 384, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 2, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 2, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 2, 384, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 3, 128, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 3, 256, 57, 54, 39,,,, +7, 4, 4, 1, 3, 3, 3, 3, 384, 57, 54, 39,,,, +7, 4, 4, 2, 1, 1, 1, 1, 128, 111, 111, 75,,,, +7, 4, 4, 2, 1, 1, 1, 1, 256, 111, 111, 75,,,, +7, 4, 4, 2, 1, 1, 1, 1, 384, 111, 111, 75,,,, +7, 4, 4, 2, 2, 2, 1, 1, 128, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 1, 1, 256, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 1, 1, 384, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 1, 2, 128, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 1, 2, 256, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 1, 2, 384, 111, 107, 
75,,,, +7, 4, 4, 2, 2, 2, 2, 1, 128, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 2, 1, 256, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 2, 1, 384, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 2, 2, 128, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 2, 2, 256, 111, 107, 75,,,, +7, 4, 4, 2, 2, 2, 2, 2, 384, 111, 107, 75,,,, +7, 4, 4, 2, 3, 3, 1, 1, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 1, 1, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 1, 1, 384, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 1, 2, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 1, 2, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 1, 2, 384, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 1, 3, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 1, 3, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 1, 3, 384, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 1, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 1, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 1, 384, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 2, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 2, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 2, 384, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 3, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 3, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 2, 3, 384, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 1, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 1, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 1, 384, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 2, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 2, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 2, 384, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 3, 128, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 3, 256, 111, 105, 75,,,, +7, 4, 4, 2, 3, 3, 3, 3, 384, 111, 105, 75,,,, +7, 4, 4, 3, 1, 1, 1, 1, 128, 165, 165, 111,,,, +7, 4, 4, 3, 1, 1, 1, 1, 256, 165, 165, 111,,,, +7, 4, 4, 3, 1, 1, 1, 1, 384, 165, 165, 111,,,, +7, 4, 4, 3, 2, 2, 1, 1, 128, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 1, 1, 256, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 1, 1, 384, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 1, 2, 128, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 1, 2, 256, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 1, 2, 384, 165, 159, 111,,,, +7, 4, 4, 
3, 2, 2, 2, 1, 128, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 2, 1, 256, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 2, 1, 384, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 2, 2, 128, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 2, 2, 256, 165, 159, 111,,,, +7, 4, 4, 3, 2, 2, 2, 2, 384, 165, 159, 111,,,, +7, 4, 4, 3, 3, 3, 1, 1, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 1, 1, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 1, 1, 384, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 1, 2, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 1, 2, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 1, 2, 384, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 1, 3, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 1, 3, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 1, 3, 384, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 1, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 1, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 1, 384, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 2, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 2, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 2, 384, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 3, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 3, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 2, 3, 384, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 1, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 1, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 1, 384, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 2, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 2, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 2, 384, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 3, 128, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 3, 256, 165, 156, 111,,,, +7, 4, 4, 3, 3, 3, 3, 3, 384, 165, 156, 111,,,, +7, 4, 4, 4, 1, 1, 1, 1, 128, 219, 219, 147,,,, +7, 4, 4, 4, 1, 1, 1, 1, 256, 219, 219, 147,,,, +7, 4, 4, 4, 1, 1, 1, 1, 384, 219, 219, 147,,,, +7, 4, 4, 4, 2, 2, 1, 1, 128, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 1, 1, 256, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 1, 1, 384, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 1, 2, 128, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 1, 2, 256, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 1, 2, 384, 219, 211, 
147,,,, +7, 4, 4, 4, 2, 2, 2, 1, 128, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 2, 1, 256, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 2, 1, 384, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 2, 2, 128, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 2, 2, 256, 219, 211, 147,,,, +7, 4, 4, 4, 2, 2, 2, 2, 384, 219, 211, 147,,,, +7, 4, 4, 4, 3, 3, 1, 1, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 1, 1, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 1, 1, 384, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 1, 2, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 1, 2, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 1, 2, 384, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 1, 3, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 1, 3, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 1, 3, 384, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 1, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 1, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 1, 384, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 2, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 2, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 2, 384, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 3, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 3, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 2, 3, 384, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 1, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 1, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 1, 384, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 2, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 2, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 2, 384, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 3, 128, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 3, 256, 219, 207, 147,,,, +7, 4, 4, 4, 3, 3, 3, 3, 384, 219, 207, 147,,,, +7, 4, 5, 1, 1, 1, 1, 1, 128, 57, 57, 39,,,, +7, 4, 5, 1, 1, 1, 1, 1, 256, 57, 57, 39,,,, +7, 4, 5, 1, 1, 1, 1, 1, 384, 57, 57, 39,,,, +7, 4, 5, 1, 2, 2, 1, 1, 128, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 1, 1, 256, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 1, 1, 384, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 1, 2, 128, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 1, 2, 256, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 1, 2, 384, 57, 55, 39,,,, 
+7, 4, 5, 1, 2, 2, 2, 1, 128, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 2, 1, 256, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 2, 1, 384, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 2, 2, 128, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 2, 2, 256, 57, 55, 39,,,, +7, 4, 5, 1, 2, 2, 2, 2, 384, 57, 55, 39,,,, +7, 4, 5, 1, 3, 3, 1, 1, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 1, 1, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 1, 1, 384, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 1, 2, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 1, 2, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 1, 2, 384, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 1, 3, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 1, 3, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 1, 3, 384, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 1, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 1, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 1, 384, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 2, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 2, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 2, 384, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 3, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 3, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 2, 3, 384, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 1, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 1, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 1, 384, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 2, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 2, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 2, 384, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 3, 128, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 3, 256, 57, 54, 39,,,, +7, 4, 5, 1, 3, 3, 3, 3, 384, 57, 54, 39,,,, +7, 4, 5, 2, 1, 1, 1, 1, 128, 111, 111, 75,,,, +7, 4, 5, 2, 1, 1, 1, 1, 256, 111, 111, 75,,,, +7, 4, 5, 2, 1, 1, 1, 1, 384, 111, 111, 75,,,, +7, 4, 5, 2, 2, 2, 1, 1, 128, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 1, 1, 256, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 1, 1, 384, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 1, 2, 128, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 1, 2, 256, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 1, 2, 384, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 2, 1, 128, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 2, 1, 256, 111, 107, 
75,,,, +7, 4, 5, 2, 2, 2, 2, 1, 384, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 2, 2, 128, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 2, 2, 256, 111, 107, 75,,,, +7, 4, 5, 2, 2, 2, 2, 2, 384, 111, 107, 75,,,, +7, 4, 5, 2, 3, 3, 1, 1, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 1, 1, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 1, 1, 384, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 1, 2, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 1, 2, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 1, 2, 384, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 1, 3, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 1, 3, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 1, 3, 384, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 1, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 1, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 1, 384, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 2, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 2, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 2, 384, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 3, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 3, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 2, 3, 384, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 1, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 1, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 1, 384, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 2, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 2, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 2, 384, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 3, 128, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 3, 256, 111, 105, 75,,,, +7, 4, 5, 2, 3, 3, 3, 3, 384, 111, 105, 75,,,, +7, 4, 5, 3, 1, 1, 1, 1, 128, 165, 165, 111,,,, +7, 4, 5, 3, 1, 1, 1, 1, 256, 165, 165, 111,,,, +7, 4, 5, 3, 1, 1, 1, 1, 384, 165, 165, 111,,,, +7, 4, 5, 3, 2, 2, 1, 1, 128, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 1, 1, 256, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 1, 1, 384, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 1, 2, 128, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 1, 2, 256, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 1, 2, 384, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 2, 1, 128, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 2, 1, 256, 165, 159, 111,,,, +7, 4, 
5, 3, 2, 2, 2, 1, 384, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 2, 2, 128, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 2, 2, 256, 165, 159, 111,,,, +7, 4, 5, 3, 2, 2, 2, 2, 384, 165, 159, 111,,,, +7, 4, 5, 3, 3, 3, 1, 1, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 1, 1, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 1, 1, 384, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 1, 2, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 1, 2, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 1, 2, 384, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 1, 3, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 1, 3, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 1, 3, 384, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 1, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 1, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 1, 384, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 2, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 2, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 2, 384, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 3, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 3, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 2, 3, 384, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 1, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 1, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 1, 384, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 2, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 2, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 2, 384, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 3, 128, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 3, 256, 165, 156, 111,,,, +7, 4, 5, 3, 3, 3, 3, 3, 384, 165, 156, 111,,,, +7, 4, 5, 4, 1, 1, 1, 1, 128, 219, 219, 147,,,, +7, 4, 5, 4, 1, 1, 1, 1, 256, 219, 219, 147,,,, +7, 4, 5, 4, 1, 1, 1, 1, 384, 219, 219, 147,,,, +7, 4, 5, 4, 2, 2, 1, 1, 128, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 1, 1, 256, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 1, 1, 384, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 1, 2, 128, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 1, 2, 256, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 1, 2, 384, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 2, 1, 128, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 2, 1, 256, 219, 
211, 147,,,, +7, 4, 5, 4, 2, 2, 2, 1, 384, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 2, 2, 128, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 2, 2, 256, 219, 211, 147,,,, +7, 4, 5, 4, 2, 2, 2, 2, 384, 219, 211, 147,,,, +7, 4, 5, 4, 3, 3, 1, 1, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 1, 1, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 1, 1, 384, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 1, 2, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 1, 2, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 1, 2, 384, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 1, 3, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 1, 3, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 1, 3, 384, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 1, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 1, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 1, 384, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 2, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 2, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 2, 384, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 3, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 3, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 2, 3, 384, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 1, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 1, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 1, 384, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 2, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 2, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 2, 384, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 3, 128, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 3, 256, 219, 207, 147,,,, +7, 4, 5, 4, 3, 3, 3, 3, 384, 219, 207, 147,,,, +7, 4, 6, 1, 1, 1, 1, 1, 128, 57, 57, 39,,,, +7, 4, 6, 1, 1, 1, 1, 1, 256, 57, 57, 39,,,, +7, 4, 6, 1, 1, 1, 1, 1, 384, 57, 57, 39,,,, +7, 4, 6, 1, 2, 2, 1, 1, 128, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 1, 1, 256, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 1, 1, 384, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 1, 2, 128, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 1, 2, 256, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 1, 2, 384, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 2, 1, 128, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 2, 1, 256, 57, 55, 39,,,, +7, 
4, 6, 1, 2, 2, 2, 1, 384, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 2, 2, 128, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 2, 2, 256, 57, 55, 39,,,, +7, 4, 6, 1, 2, 2, 2, 2, 384, 57, 55, 39,,,, +7, 4, 6, 1, 3, 3, 1, 1, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 1, 1, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 1, 1, 384, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 1, 2, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 1, 2, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 1, 2, 384, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 1, 3, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 1, 3, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 1, 3, 384, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 1, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 1, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 1, 384, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 2, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 2, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 2, 384, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 3, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 3, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 2, 3, 384, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 1, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 1, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 1, 384, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 2, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 2, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 2, 384, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 3, 128, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 3, 256, 57, 54, 39,,,, +7, 4, 6, 1, 3, 3, 3, 3, 384, 57, 54, 39,,,, +7, 4, 6, 2, 1, 1, 1, 1, 128, 111, 111, 75,,,, +7, 4, 6, 2, 1, 1, 1, 1, 256, 111, 111, 75,,,, +7, 4, 6, 2, 1, 1, 1, 1, 384, 111, 111, 75,,,, +7, 4, 6, 2, 2, 2, 1, 1, 128, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 1, 1, 256, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 1, 1, 384, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 1, 2, 128, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 1, 2, 256, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 1, 2, 384, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 2, 1, 128, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 2, 1, 256, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 2, 1, 384, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 2, 2, 128, 111, 107, 
75,,,, +7, 4, 6, 2, 2, 2, 2, 2, 256, 111, 107, 75,,,, +7, 4, 6, 2, 2, 2, 2, 2, 384, 111, 107, 75,,,, +7, 4, 6, 2, 3, 3, 1, 1, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 1, 1, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 1, 1, 384, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 1, 2, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 1, 2, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 1, 2, 384, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 1, 3, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 1, 3, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 1, 3, 384, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 1, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 1, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 1, 384, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 2, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 2, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 2, 384, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 3, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 3, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 2, 3, 384, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 1, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 1, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 1, 384, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 2, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 2, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 2, 384, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 3, 128, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 3, 256, 111, 105, 75,,,, +7, 4, 6, 2, 3, 3, 3, 3, 384, 111, 105, 75,,,, +7, 4, 6, 3, 1, 1, 1, 1, 128, 165, 165, 111,,,, +7, 4, 6, 3, 1, 1, 1, 1, 256, 165, 165, 111,,,, +7, 4, 6, 3, 1, 1, 1, 1, 384, 165, 165, 111,,,, +7, 4, 6, 3, 2, 2, 1, 1, 128, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 1, 1, 256, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 1, 1, 384, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 1, 2, 128, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 1, 2, 256, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 1, 2, 384, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 2, 1, 128, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 2, 1, 256, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 2, 1, 384, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 2, 2, 128, 165, 159, 111,,,, +7, 
4, 6, 3, 2, 2, 2, 2, 256, 165, 159, 111,,,, +7, 4, 6, 3, 2, 2, 2, 2, 384, 165, 159, 111,,,, +7, 4, 6, 3, 3, 3, 1, 1, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 1, 1, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 1, 1, 384, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 1, 2, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 1, 2, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 1, 2, 384, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 1, 3, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 1, 3, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 1, 3, 384, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 1, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 1, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 1, 384, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 2, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 2, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 2, 384, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 3, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 3, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 2, 3, 384, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 1, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 1, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 1, 384, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 2, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 2, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 2, 384, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 3, 128, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 3, 256, 165, 156, 111,,,, +7, 4, 6, 3, 3, 3, 3, 3, 384, 165, 156, 111,,,, +7, 4, 6, 4, 1, 1, 1, 1, 128, 219, 219, 147,,,, +7, 4, 6, 4, 1, 1, 1, 1, 256, 219, 219, 147,,,, +7, 4, 6, 4, 1, 1, 1, 1, 384, 219, 219, 147,,,, +7, 4, 6, 4, 2, 2, 1, 1, 128, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 1, 1, 256, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 1, 1, 384, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 1, 2, 128, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 1, 2, 256, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 1, 2, 384, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 2, 1, 128, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 2, 1, 256, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 2, 1, 384, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 2, 2, 128, 219, 
211, 147,,,, +7, 4, 6, 4, 2, 2, 2, 2, 256, 219, 211, 147,,,, +7, 4, 6, 4, 2, 2, 2, 2, 384, 219, 211, 147,,,, +7, 4, 6, 4, 3, 3, 1, 1, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 1, 1, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 1, 1, 384, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 1, 2, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 1, 2, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 1, 2, 384, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 1, 3, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 1, 3, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 1, 3, 384, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 1, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 1, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 1, 384, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 2, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 2, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 2, 384, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 3, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 3, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 2, 3, 384, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 1, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 1, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 1, 384, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 2, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 2, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 2, 384, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 3, 128, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 3, 256, 219, 207, 147,,,, +7, 4, 6, 4, 3, 3, 3, 3, 384, 219, 207, 147,,,, +7, 4, 7, 1, 1, 1, 1, 1, 128, 57, 57, 39,,,, +7, 4, 7, 1, 1, 1, 1, 1, 256, 57, 57, 39,,,, +7, 4, 7, 1, 1, 1, 1, 1, 384, 57, 57, 39,,,, +7, 4, 7, 1, 2, 2, 1, 1, 128, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 1, 1, 256, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 1, 1, 384, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 1, 2, 128, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 1, 2, 256, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 1, 2, 384, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 2, 1, 128, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 2, 1, 256, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 2, 1, 384, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 2, 2, 128, 57, 55, 39,,,, +7, 4, 7, 
1, 2, 2, 2, 2, 256, 57, 55, 39,,,, +7, 4, 7, 1, 2, 2, 2, 2, 384, 57, 55, 39,,,, +7, 4, 7, 1, 3, 3, 1, 1, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 1, 1, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 1, 1, 384, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 1, 2, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 1, 2, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 1, 2, 384, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 1, 3, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 1, 3, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 1, 3, 384, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 1, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 1, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 1, 384, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 2, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 2, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 2, 384, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 3, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 3, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 2, 3, 384, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 1, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 1, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 1, 384, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 2, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 2, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 2, 384, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 3, 128, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 3, 256, 57, 54, 39,,,, +7, 4, 7, 1, 3, 3, 3, 3, 384, 57, 54, 39,,,, +7, 4, 7, 2, 1, 1, 1, 1, 128, 111, 111, 75,,,, +7, 4, 7, 2, 1, 1, 1, 1, 256, 111, 111, 75,,,, +7, 4, 7, 2, 1, 1, 1, 1, 384, 111, 111, 75,,,, +7, 4, 7, 2, 2, 2, 1, 1, 128, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 1, 1, 256, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 1, 1, 384, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 1, 2, 128, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 1, 2, 256, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 1, 2, 384, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 2, 1, 128, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 2, 1, 256, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 2, 1, 384, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 2, 2, 128, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 2, 2, 256, 111, 107, 75,,,, +7, 4, 7, 2, 2, 2, 2, 2, 384, 111, 107, 75,,,, 
+7, 4, 7, 2, 3, 3, 1, 1, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 1, 1, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 1, 1, 384, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 1, 2, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 1, 2, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 1, 2, 384, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 1, 3, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 1, 3, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 1, 3, 384, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 1, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 1, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 1, 384, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 2, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 2, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 2, 384, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 3, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 3, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 2, 3, 384, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 1, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 1, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 1, 384, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 2, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 2, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 2, 384, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 3, 128, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 3, 256, 111, 105, 75,,,, +7, 4, 7, 2, 3, 3, 3, 3, 384, 111, 105, 75,,,, +7, 4, 7, 3, 1, 1, 1, 1, 128, 165, 165, 111,,,, +7, 4, 7, 3, 1, 1, 1, 1, 256, 165, 165, 111,,,, +7, 4, 7, 3, 1, 1, 1, 1, 384, 165, 165, 111,,,, +7, 4, 7, 3, 2, 2, 1, 1, 128, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 1, 1, 256, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 1, 1, 384, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 1, 2, 128, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 1, 2, 256, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 1, 2, 384, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 2, 1, 128, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 2, 1, 256, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 2, 1, 384, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 2, 2, 128, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 2, 2, 256, 165, 159, 111,,,, +7, 4, 7, 3, 2, 2, 2, 2, 384, 165, 159, 111,,,, +7, 4, 7, 
3, 3, 3, 1, 1, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 1, 1, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 1, 1, 384, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 1, 2, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 1, 2, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 1, 2, 384, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 1, 3, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 1, 3, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 1, 3, 384, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 1, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 1, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 1, 384, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 2, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 2, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 2, 384, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 3, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 3, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 2, 3, 384, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 1, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 1, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 1, 384, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 2, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 2, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 2, 384, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 3, 128, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 3, 256, 165, 156, 111,,,, +7, 4, 7, 3, 3, 3, 3, 3, 384, 165, 156, 111,,,, +7, 4, 7, 4, 1, 1, 1, 1, 128, 219, 219, 147,,,, +7, 4, 7, 4, 1, 1, 1, 1, 256, 219, 219, 147,,,, +7, 4, 7, 4, 1, 1, 1, 1, 384, 219, 219, 147,,,, +7, 4, 7, 4, 2, 2, 1, 1, 128, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 1, 1, 256, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 1, 1, 384, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 1, 2, 128, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 1, 2, 256, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 1, 2, 384, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 2, 1, 128, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 2, 1, 256, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 2, 1, 384, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 2, 2, 128, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 2, 2, 256, 219, 211, 147,,,, +7, 4, 7, 4, 2, 2, 2, 2, 384, 219, 211, 
147,,,, +7, 4, 7, 4, 3, 3, 1, 1, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 1, 1, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 1, 1, 384, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 1, 2, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 1, 2, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 1, 2, 384, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 1, 3, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 1, 3, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 1, 3, 384, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 1, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 1, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 1, 384, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 2, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 2, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 2, 384, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 3, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 3, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 2, 3, 384, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 1, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 1, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 1, 384, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 2, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 2, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 2, 384, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 3, 128, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 3, 256, 219, 207, 147,,,, +7, 4, 7, 4, 3, 3, 3, 3, 384, 219, 207, 147,,,, +7, 5, 1, 1, 1, 1, 1, 1, 128, 58, 40, 22,,,, +7, 5, 1, 1, 1, 1, 1, 1, 256, 58, 40, 22,,,, +7, 5, 1, 1, 1, 1, 1, 1, 384, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 1, 1, 128, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 1, 1, 256, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 1, 1, 384, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 1, 2, 128, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 1, 2, 256, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 1, 2, 384, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 2, 1, 128, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 2, 1, 256, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 2, 1, 384, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 2, 2, 128, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 2, 2, 256, 58, 40, 22,,,, +7, 5, 1, 1, 2, 2, 2, 2, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 
1, 1, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 1, 1, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 1, 1, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 1, 2, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 1, 2, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 1, 2, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 1, 3, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 1, 3, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 1, 3, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 1, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 1, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 1, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 2, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 2, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 2, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 3, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 3, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 2, 3, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 1, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 1, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 1, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 2, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 2, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 2, 384, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 3, 128, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 3, 256, 58, 40, 22,,,, +7, 5, 1, 1, 3, 3, 3, 3, 384, 58, 40, 22,,,, +7, 5, 1, 2, 1, 1, 1, 1, 128, 112, 76, 40,,,, +7, 5, 1, 2, 1, 1, 1, 1, 256, 112, 76, 40,,,, +7, 5, 1, 2, 1, 1, 1, 1, 384, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 1, 1, 128, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 1, 1, 256, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 1, 1, 384, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 1, 2, 128, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 1, 2, 256, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 1, 2, 384, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 2, 1, 128, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 2, 1, 256, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 2, 1, 384, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 2, 2, 128, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 2, 2, 256, 112, 76, 40,,,, +7, 5, 1, 2, 2, 2, 2, 2, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 1, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 1, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 
1, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 2, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 2, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 2, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 3, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 3, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 1, 3, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 1, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 1, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 1, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 2, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 2, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 2, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 3, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 3, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 2, 3, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 1, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 1, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 1, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 2, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 2, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 2, 384, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 3, 128, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 3, 256, 112, 76, 40,,,, +7, 5, 1, 2, 3, 3, 3, 3, 384, 112, 76, 40,,,, +7, 5, 1, 3, 1, 1, 1, 1, 128, 166, 112, 58,,,, +7, 5, 1, 3, 1, 1, 1, 1, 256, 166, 112, 58,,,, +7, 5, 1, 3, 1, 1, 1, 1, 384, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 1, 1, 128, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 1, 1, 256, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 1, 1, 384, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 1, 2, 128, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 1, 2, 256, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 1, 2, 384, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 2, 1, 128, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 2, 1, 256, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 2, 1, 384, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 2, 2, 128, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 2, 2, 256, 166, 112, 58,,,, +7, 5, 1, 3, 2, 2, 2, 2, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 1, 128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 1, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 1, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 2, 
128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 2, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 2, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 3, 128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 3, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 1, 3, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 1, 128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 1, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 1, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 2, 128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 2, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 2, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 3, 128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 3, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 2, 3, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 1, 128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 1, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 1, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 2, 128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 2, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 2, 384, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 3, 128, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 3, 256, 166, 112, 58,,,, +7, 5, 1, 3, 3, 3, 3, 3, 384, 166, 112, 58,,,, +7, 5, 1, 4, 1, 1, 1, 1, 128, 220, 148, 76,,,, +7, 5, 1, 4, 1, 1, 1, 1, 256, 220, 148, 76,,,, +7, 5, 1, 4, 1, 1, 1, 1, 384, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 1, 1, 128, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 1, 1, 256, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 1, 1, 384, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 1, 2, 128, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 1, 2, 256, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 1, 2, 384, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 2, 1, 128, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 2, 1, 256, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 2, 1, 384, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 2, 2, 128, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 2, 2, 256, 220, 148, 76,,,, +7, 5, 1, 4, 2, 2, 2, 2, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 1, 1, 128, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 1, 1, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 1, 1, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 1, 2, 128, 220, 148, 76,,,, +7, 
5, 1, 4, 3, 3, 1, 2, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 1, 2, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 1, 3, 128, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 1, 3, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 1, 3, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 1, 128, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 1, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 1, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 2, 128, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 2, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 2, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 3, 128, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 3, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 2, 3, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 1, 128, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 1, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 1, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 2, 128, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 2, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 2, 384, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 3, 128, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 3, 256, 220, 148, 76,,,, +7, 5, 1, 4, 3, 3, 3, 3, 384, 220, 148, 76,,,, +7, 5, 1, 5, 1, 1, 1, 1, 128, 240, 184, 94,,,, +7, 5, 1, 5, 1, 1, 1, 1, 256, 240, 184, 94,,,, +7, 5, 1, 5, 1, 1, 1, 1, 384, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 1, 1, 128, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 1, 1, 256, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 1, 1, 384, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 1, 2, 128, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 1, 2, 256, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 1, 2, 384, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 2, 1, 128, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 2, 1, 256, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 2, 1, 384, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 2, 2, 128, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 2, 2, 256, 240, 184, 94,,,, +7, 5, 1, 5, 2, 2, 2, 2, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 1, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 1, 256, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 1, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 2, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 2, 256, 
240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 2, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 3, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 3, 256, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 1, 3, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 1, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 1, 256, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 1, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 2, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 2, 256, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 2, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 3, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 3, 256, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 2, 3, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 1, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 1, 256, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 1, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 2, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 2, 256, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 2, 384, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 3, 128, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 3, 256, 240, 184, 94,,,, +7, 5, 1, 5, 3, 3, 3, 3, 384, 240, 184, 94,,,, +7, 5, 2, 1, 1, 1, 1, 1, 128, 58, 40, 22,,,, +7, 5, 2, 1, 1, 1, 1, 1, 256, 58, 40, 22,,,, +7, 5, 2, 1, 1, 1, 1, 1, 384, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 1, 1, 128, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 1, 1, 256, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 1, 1, 384, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 1, 2, 128, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 1, 2, 256, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 1, 2, 384, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 2, 1, 128, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 2, 1, 256, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 2, 1, 384, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 2, 2, 128, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 2, 2, 256, 58, 40, 22,,,, +7, 5, 2, 1, 2, 2, 2, 2, 384, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 1, 1, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 1, 1, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 1, 1, 384, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 1, 2, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 1, 2, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 1, 2, 384, 58, 40, 22,,,, +7, 
5, 2, 1, 3, 3, 1, 3, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 1, 3, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 1, 3, 384, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 1, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 1, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 1, 384, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 2, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 2, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 2, 384, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 3, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 3, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 2, 3, 384, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 1, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 1, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 1, 384, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 2, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 2, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 2, 384, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 3, 128, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 3, 256, 58, 40, 22,,,, +7, 5, 2, 1, 3, 3, 3, 3, 384, 58, 40, 22,,,, +7, 5, 2, 2, 1, 1, 1, 1, 128, 112, 76, 40,,,, +7, 5, 2, 2, 1, 1, 1, 1, 256, 112, 76, 40,,,, +7, 5, 2, 2, 1, 1, 1, 1, 384, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 1, 1, 128, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 1, 1, 256, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 1, 1, 384, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 1, 2, 128, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 1, 2, 256, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 1, 2, 384, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 2, 1, 128, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 2, 1, 256, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 2, 1, 384, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 2, 2, 128, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 2, 2, 256, 112, 76, 40,,,, +7, 5, 2, 2, 2, 2, 2, 2, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 1, 1, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 1, 1, 256, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 1, 1, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 1, 2, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 1, 2, 256, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 1, 2, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 1, 3, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 1, 3, 256, 112, 76, 40,,,, 
+7, 5, 2, 2, 3, 3, 1, 3, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 1, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 1, 256, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 1, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 2, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 2, 256, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 2, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 3, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 3, 256, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 2, 3, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 1, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 1, 256, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 1, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 2, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 2, 256, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 2, 384, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 3, 128, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 3, 256, 112, 76, 40,,,, +7, 5, 2, 2, 3, 3, 3, 3, 384, 112, 76, 40,,,, +7, 5, 2, 3, 1, 1, 1, 1, 128, 166, 112, 58,,,, +7, 5, 2, 3, 1, 1, 1, 1, 256, 166, 112, 58,,,, +7, 5, 2, 3, 1, 1, 1, 1, 384, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 1, 1, 128, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 1, 1, 256, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 1, 1, 384, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 1, 2, 128, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 1, 2, 256, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 1, 2, 384, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 2, 1, 128, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 2, 1, 256, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 2, 1, 384, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 2, 2, 128, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 2, 2, 256, 166, 112, 58,,,, +7, 5, 2, 3, 2, 2, 2, 2, 384, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 1, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 1, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 1, 384, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 2, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 2, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 2, 384, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 3, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 3, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 1, 3, 384, 166, 112, 
58,,,, +7, 5, 2, 3, 3, 3, 2, 1, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 2, 1, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 2, 1, 384, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 2, 2, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 2, 2, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 2, 2, 384, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 2, 3, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 2, 3, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 2, 3, 384, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 1, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 1, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 1, 384, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 2, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 2, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 2, 384, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 3, 128, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 3, 256, 166, 112, 58,,,, +7, 5, 2, 3, 3, 3, 3, 3, 384, 166, 112, 58,,,, +7, 5, 2, 4, 1, 1, 1, 1, 128, 220, 148, 76,,,, +7, 5, 2, 4, 1, 1, 1, 1, 256, 220, 148, 76,,,, +7, 5, 2, 4, 1, 1, 1, 1, 384, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 1, 1, 128, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 1, 1, 256, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 1, 1, 384, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 1, 2, 128, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 1, 2, 256, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 1, 2, 384, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 2, 1, 128, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 2, 1, 256, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 2, 1, 384, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 2, 2, 128, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 2, 2, 256, 220, 148, 76,,,, +7, 5, 2, 4, 2, 2, 2, 2, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 1, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 1, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 1, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 2, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 2, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 2, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 3, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 3, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 1, 3, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 
2, 1, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 2, 1, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 2, 1, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 2, 2, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 2, 2, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 2, 2, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 2, 3, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 2, 3, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 2, 3, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 1, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 1, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 1, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 2, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 2, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 2, 384, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 3, 128, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 3, 256, 220, 148, 76,,,, +7, 5, 2, 4, 3, 3, 3, 3, 384, 220, 148, 76,,,, +7, 5, 2, 5, 1, 1, 1, 1, 128, 240, 184, 94,,,, +7, 5, 2, 5, 1, 1, 1, 1, 256, 240, 184, 94,,,, +7, 5, 2, 5, 1, 1, 1, 1, 384, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 1, 1, 128, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 1, 1, 256, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 1, 1, 384, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 1, 2, 128, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 1, 2, 256, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 1, 2, 384, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 2, 1, 128, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 2, 1, 256, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 2, 1, 384, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 2, 2, 128, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 2, 2, 256, 240, 184, 94,,,, +7, 5, 2, 5, 2, 2, 2, 2, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 1, 128, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 1, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 1, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 2, 128, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 2, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 2, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 3, 128, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 3, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 1, 3, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 2, 1, 128, 240, 184, 
94,,,, +7, 5, 2, 5, 3, 3, 2, 1, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 2, 1, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 2, 2, 128, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 2, 2, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 2, 2, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 2, 3, 128, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 2, 3, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 2, 3, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 1, 128, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 1, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 1, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 2, 128, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 2, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 2, 384, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 3, 128, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 3, 256, 240, 184, 94,,,, +7, 5, 2, 5, 3, 3, 3, 3, 384, 240, 184, 94,,,, +7, 5, 3, 1, 1, 1, 1, 1, 128, 58, 40, 22,,,, +7, 5, 3, 1, 1, 1, 1, 1, 256, 58, 40, 22,,,, +7, 5, 3, 1, 1, 1, 1, 1, 384, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 1, 1, 128, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 1, 1, 256, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 1, 1, 384, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 1, 2, 128, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 1, 2, 256, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 1, 2, 384, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 2, 1, 128, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 2, 1, 256, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 2, 1, 384, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 2, 2, 128, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 2, 2, 256, 58, 40, 22,,,, +7, 5, 3, 1, 2, 2, 2, 2, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 1, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 1, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 1, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 2, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 2, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 2, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 3, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 3, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 1, 3, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 1, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 1, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 
1, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 2, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 2, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 2, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 3, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 3, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 2, 3, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 1, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 1, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 1, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 2, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 2, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 2, 384, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 3, 128, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 3, 256, 58, 40, 22,,,, +7, 5, 3, 1, 3, 3, 3, 3, 384, 58, 40, 22,,,, +7, 5, 3, 2, 1, 1, 1, 1, 128, 112, 76, 40,,,, +7, 5, 3, 2, 1, 1, 1, 1, 256, 112, 76, 40,,,, +7, 5, 3, 2, 1, 1, 1, 1, 384, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 1, 1, 128, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 1, 1, 256, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 1, 1, 384, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 1, 2, 128, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 1, 2, 256, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 1, 2, 384, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 2, 1, 128, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 2, 1, 256, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 2, 1, 384, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 2, 2, 128, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 2, 2, 256, 112, 76, 40,,,, +7, 5, 3, 2, 2, 2, 2, 2, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 1, 128, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 1, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 1, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 2, 128, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 2, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 2, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 3, 128, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 3, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 1, 3, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 2, 1, 128, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 2, 1, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 2, 1, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 2, 2, 128, 112, 76, 40,,,, +7, 5, 3, 2, 
3, 3, 2, 2, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 2, 2, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 2, 3, 128, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 2, 3, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 2, 3, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 1, 128, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 1, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 1, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 2, 128, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 2, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 2, 384, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 3, 128, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 3, 256, 112, 76, 40,,,, +7, 5, 3, 2, 3, 3, 3, 3, 384, 112, 76, 40,,,, +7, 5, 3, 3, 1, 1, 1, 1, 128, 166, 112, 58,,,, +7, 5, 3, 3, 1, 1, 1, 1, 256, 166, 112, 58,,,, +7, 5, 3, 3, 1, 1, 1, 1, 384, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 1, 1, 128, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 1, 1, 256, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 1, 1, 384, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 1, 2, 128, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 1, 2, 256, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 1, 2, 384, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 2, 1, 128, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 2, 1, 256, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 2, 1, 384, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 2, 2, 128, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 2, 2, 256, 166, 112, 58,,,, +7, 5, 3, 3, 2, 2, 2, 2, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 1, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 1, 256, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 1, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 2, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 2, 256, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 2, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 3, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 3, 256, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 1, 3, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 2, 1, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 2, 1, 256, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 2, 1, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 2, 2, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 2, 2, 256, 166, 112, 58,,,, +7, 
5, 3, 3, 3, 3, 2, 2, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 2, 3, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 2, 3, 256, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 2, 3, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 1, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 1, 256, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 1, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 2, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 2, 256, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 2, 384, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 3, 128, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 3, 256, 166, 112, 58,,,, +7, 5, 3, 3, 3, 3, 3, 3, 384, 166, 112, 58,,,, +7, 5, 3, 4, 1, 1, 1, 1, 128, 220, 148, 76,,,, +7, 5, 3, 4, 1, 1, 1, 1, 256, 220, 148, 76,,,, +7, 5, 3, 4, 1, 1, 1, 1, 384, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 1, 1, 128, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 1, 1, 256, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 1, 1, 384, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 1, 2, 128, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 1, 2, 256, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 1, 2, 384, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 2, 1, 128, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 2, 1, 256, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 2, 1, 384, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 2, 2, 128, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 2, 2, 256, 220, 148, 76,,,, +7, 5, 3, 4, 2, 2, 2, 2, 384, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 1, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 1, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 1, 384, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 2, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 2, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 2, 384, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 3, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 3, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 1, 3, 384, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 1, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 1, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 1, 384, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 2, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 2, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 2, 384, 
220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 3, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 3, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 2, 3, 384, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 1, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 1, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 1, 384, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 2, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 2, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 2, 384, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 3, 128, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 3, 256, 220, 148, 76,,,, +7, 5, 3, 4, 3, 3, 3, 3, 384, 220, 148, 76,,,, +7, 5, 3, 5, 1, 1, 1, 1, 128, 240, 184, 94,,,, +7, 5, 3, 5, 1, 1, 1, 1, 256, 240, 184, 94,,,, +7, 5, 3, 5, 1, 1, 1, 1, 384, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 1, 1, 128, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 1, 1, 256, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 1, 1, 384, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 1, 2, 128, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 1, 2, 256, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 1, 2, 384, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 2, 1, 128, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 2, 1, 256, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 2, 1, 384, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 2, 2, 128, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 2, 2, 256, 240, 184, 94,,,, +7, 5, 3, 5, 2, 2, 2, 2, 384, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 1, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 1, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 1, 384, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 2, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 2, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 2, 384, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 3, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 3, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 1, 3, 384, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 2, 1, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 2, 1, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 2, 1, 384, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 2, 2, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 2, 2, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 2, 2, 384, 240, 184, 94,,,, +7, 5, 
3, 5, 3, 3, 2, 3, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 2, 3, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 2, 3, 384, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 1, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 1, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 1, 384, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 2, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 2, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 2, 384, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 3, 128, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 3, 256, 240, 184, 94,,,, +7, 5, 3, 5, 3, 3, 3, 3, 384, 240, 184, 94,,,, +7, 5, 4, 1, 1, 1, 1, 1, 128, 58, 40, 22,,,, +7, 5, 4, 1, 1, 1, 1, 1, 256, 58, 40, 22,,,, +7, 5, 4, 1, 1, 1, 1, 1, 384, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 1, 1, 128, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 1, 1, 256, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 1, 1, 384, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 1, 2, 128, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 1, 2, 256, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 1, 2, 384, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 2, 1, 128, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 2, 1, 256, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 2, 1, 384, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 2, 2, 128, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 2, 2, 256, 58, 40, 22,,,, +7, 5, 4, 1, 2, 2, 2, 2, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 1, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 1, 256, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 1, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 2, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 2, 256, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 2, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 3, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 3, 256, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 1, 3, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 2, 1, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 2, 1, 256, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 2, 1, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 2, 2, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 2, 2, 256, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 2, 2, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 2, 3, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 2, 3, 256, 58, 40, 22,,,, 
+7, 5, 4, 1, 3, 3, 2, 3, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 1, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 1, 256, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 1, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 2, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 2, 256, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 2, 384, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 3, 128, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 3, 256, 58, 40, 22,,,, +7, 5, 4, 1, 3, 3, 3, 3, 384, 58, 40, 22,,,, +7, 5, 4, 2, 1, 1, 1, 1, 128, 112, 76, 40,,,, +7, 5, 4, 2, 1, 1, 1, 1, 256, 112, 76, 40,,,, +7, 5, 4, 2, 1, 1, 1, 1, 384, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 1, 1, 128, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 1, 1, 256, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 1, 1, 384, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 1, 2, 128, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 1, 2, 256, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 1, 2, 384, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 2, 1, 128, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 2, 1, 256, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 2, 1, 384, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 2, 2, 128, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 2, 2, 256, 112, 76, 40,,,, +7, 5, 4, 2, 2, 2, 2, 2, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 1, 128, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 1, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 1, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 2, 128, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 2, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 2, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 3, 128, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 3, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 1, 3, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 1, 128, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 1, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 1, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 2, 128, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 2, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 2, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 3, 128, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 3, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 2, 3, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 1, 128, 
112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 1, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 1, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 2, 128, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 2, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 2, 384, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 3, 128, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 3, 256, 112, 76, 40,,,, +7, 5, 4, 2, 3, 3, 3, 3, 384, 112, 76, 40,,,, +7, 5, 4, 3, 1, 1, 1, 1, 128, 166, 112, 58,,,, +7, 5, 4, 3, 1, 1, 1, 1, 256, 166, 112, 58,,,, +7, 5, 4, 3, 1, 1, 1, 1, 384, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 1, 1, 128, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 1, 1, 256, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 1, 1, 384, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 1, 2, 128, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 1, 2, 256, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 1, 2, 384, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 2, 1, 128, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 2, 1, 256, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 2, 1, 384, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 2, 2, 128, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 2, 2, 256, 166, 112, 58,,,, +7, 5, 4, 3, 2, 2, 2, 2, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 1, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 1, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 1, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 2, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 2, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 2, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 3, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 3, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 1, 3, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 1, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 1, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 1, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 2, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 2, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 2, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 3, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 3, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 2, 3, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 3, 1, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 
3, 3, 1, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 3, 1, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 3, 2, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 3, 2, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 3, 2, 384, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 3, 3, 128, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 3, 3, 256, 166, 112, 58,,,, +7, 5, 4, 3, 3, 3, 3, 3, 384, 166, 112, 58,,,, +7, 5, 4, 4, 1, 1, 1, 1, 128, 220, 148, 76,,,, +7, 5, 4, 4, 1, 1, 1, 1, 256, 220, 148, 76,,,, +7, 5, 4, 4, 1, 1, 1, 1, 384, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 1, 1, 128, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 1, 1, 256, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 1, 1, 384, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 1, 2, 128, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 1, 2, 256, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 1, 2, 384, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 2, 1, 128, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 2, 1, 256, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 2, 1, 384, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 2, 2, 128, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 2, 2, 256, 220, 148, 76,,,, +7, 5, 4, 4, 2, 2, 2, 2, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 1, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 1, 256, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 1, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 2, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 2, 256, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 2, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 3, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 3, 256, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 1, 3, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 1, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 1, 256, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 1, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 2, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 2, 256, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 2, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 3, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 3, 256, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 2, 3, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 3, 1, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 3, 1, 256, 220, 148, 
76,,,, +7, 5, 4, 4, 3, 3, 3, 1, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 3, 2, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 3, 2, 256, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 3, 2, 384, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 3, 3, 128, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 3, 3, 256, 220, 148, 76,,,, +7, 5, 4, 4, 3, 3, 3, 3, 384, 220, 148, 76,,,, +7, 5, 4, 5, 1, 1, 1, 1, 128, 240, 184, 94,,,, +7, 5, 4, 5, 1, 1, 1, 1, 256, 240, 184, 94,,,, +7, 5, 4, 5, 1, 1, 1, 1, 384, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 1, 1, 128, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 1, 1, 256, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 1, 1, 384, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 1, 2, 128, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 1, 2, 256, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 1, 2, 384, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 2, 1, 128, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 2, 1, 256, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 2, 1, 384, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 2, 2, 128, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 2, 2, 256, 240, 184, 94,,,, +7, 5, 4, 5, 2, 2, 2, 2, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 1, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 1, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 1, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 2, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 2, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 2, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 3, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 3, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 1, 3, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 1, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 1, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 1, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 2, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 2, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 2, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 3, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 3, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 2, 3, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 3, 1, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 3, 1, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 
3, 1, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 3, 2, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 3, 2, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 3, 2, 384, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 3, 3, 128, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 3, 3, 256, 240, 184, 94,,,, +7, 5, 4, 5, 3, 3, 3, 3, 384, 240, 184, 94,,,, +7, 5, 5, 1, 1, 1, 1, 1, 128, 58, 40, 22,,,, +7, 5, 5, 1, 1, 1, 1, 1, 256, 58, 40, 22,,,, +7, 5, 5, 1, 1, 1, 1, 1, 384, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 1, 1, 128, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 1, 1, 256, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 1, 1, 384, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 1, 2, 128, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 1, 2, 256, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 1, 2, 384, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 2, 1, 128, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 2, 1, 256, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 2, 1, 384, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 2, 2, 128, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 2, 2, 256, 58, 40, 22,,,, +7, 5, 5, 1, 2, 2, 2, 2, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 1, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 1, 256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 1, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 2, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 2, 256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 2, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 3, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 3, 256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 1, 3, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 1, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 1, 256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 1, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 2, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 2, 256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 2, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 3, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 3, 256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 2, 3, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 1, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 1, 256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 1, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 2, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 2, 
256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 2, 384, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 3, 128, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 3, 256, 58, 40, 22,,,, +7, 5, 5, 1, 3, 3, 3, 3, 384, 58, 40, 22,,,, +7, 5, 5, 2, 1, 1, 1, 1, 128, 112, 76, 40,,,, +7, 5, 5, 2, 1, 1, 1, 1, 256, 112, 76, 40,,,, +7, 5, 5, 2, 1, 1, 1, 1, 384, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 1, 1, 128, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 1, 1, 256, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 1, 1, 384, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 1, 2, 128, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 1, 2, 256, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 1, 2, 384, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 2, 1, 128, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 2, 1, 256, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 2, 1, 384, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 2, 2, 128, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 2, 2, 256, 112, 76, 40,,,, +7, 5, 5, 2, 2, 2, 2, 2, 384, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 1, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 1, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 1, 384, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 2, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 2, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 2, 384, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 3, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 3, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 1, 3, 384, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 1, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 1, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 1, 384, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 2, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 2, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 2, 384, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 3, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 3, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 2, 3, 384, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 3, 1, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 3, 1, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 3, 1, 384, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 3, 2, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 3, 2, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 3, 2, 384, 112, 76, 40,,,, +7, 
5, 5, 2, 3, 3, 3, 3, 128, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 3, 3, 256, 112, 76, 40,,,, +7, 5, 5, 2, 3, 3, 3, 3, 384, 112, 76, 40,,,, +7, 5, 5, 3, 1, 1, 1, 1, 128, 166, 112, 58,,,, +7, 5, 5, 3, 1, 1, 1, 1, 256, 166, 112, 58,,,, +7, 5, 5, 3, 1, 1, 1, 1, 384, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 1, 1, 128, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 1, 1, 256, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 1, 1, 384, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 1, 2, 128, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 1, 2, 256, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 1, 2, 384, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 2, 1, 128, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 2, 1, 256, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 2, 1, 384, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 2, 2, 128, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 2, 2, 256, 166, 112, 58,,,, +7, 5, 5, 3, 2, 2, 2, 2, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 1, 128, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 1, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 1, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 2, 128, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 2, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 2, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 3, 128, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 3, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 1, 3, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 1, 128, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 1, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 1, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 2, 128, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 2, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 2, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 3, 128, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 3, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 2, 3, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 1, 128, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 1, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 1, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 2, 128, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 2, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 2, 384, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 3, 128, 
166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 3, 256, 166, 112, 58,,,, +7, 5, 5, 3, 3, 3, 3, 3, 384, 166, 112, 58,,,, +7, 5, 5, 4, 1, 1, 1, 1, 128, 220, 148, 76,,,, +7, 5, 5, 4, 1, 1, 1, 1, 256, 220, 148, 76,,,, +7, 5, 5, 4, 1, 1, 1, 1, 384, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 1, 1, 128, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 1, 1, 256, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 1, 1, 384, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 1, 2, 128, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 1, 2, 256, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 1, 2, 384, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 2, 1, 128, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 2, 1, 256, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 2, 1, 384, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 2, 2, 128, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 2, 2, 256, 220, 148, 76,,,, +7, 5, 5, 4, 2, 2, 2, 2, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 1, 128, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 1, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 1, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 2, 128, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 2, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 2, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 3, 128, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 3, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 1, 3, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 1, 128, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 1, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 1, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 2, 128, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 2, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 2, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 3, 128, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 3, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 2, 3, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 3, 1, 128, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 3, 1, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 3, 1, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 3, 2, 128, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 3, 2, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 3, 2, 384, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 3, 3, 128, 220, 148, 76,,,, +7, 5, 
5, 4, 3, 3, 3, 3, 256, 220, 148, 76,,,, +7, 5, 5, 4, 3, 3, 3, 3, 384, 220, 148, 76,,,, +7, 5, 5, 5, 1, 1, 1, 1, 128, 240, 184, 94,,,, +7, 5, 5, 5, 1, 1, 1, 1, 256, 240, 184, 94,,,, +7, 5, 5, 5, 1, 1, 1, 1, 384, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 1, 1, 128, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 1, 1, 256, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 1, 1, 384, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 1, 2, 128, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 1, 2, 256, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 1, 2, 384, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 2, 1, 128, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 2, 1, 256, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 2, 1, 384, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 2, 2, 128, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 2, 2, 256, 240, 184, 94,,,, +7, 5, 5, 5, 2, 2, 2, 2, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 1, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 1, 256, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 1, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 2, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 2, 256, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 2, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 3, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 3, 256, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 1, 3, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 1, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 1, 256, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 1, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 2, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 2, 256, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 2, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 3, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 3, 256, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 2, 3, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 1, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 1, 256, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 1, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 2, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 2, 256, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 2, 384, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 3, 128, 240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 3, 256, 
240, 184, 94,,,, +7, 5, 5, 5, 3, 3, 3, 3, 384, 240, 184, 94,,,, +7, 5, 6, 1, 1, 1, 1, 1, 128, 58, 40, 22,,,, +7, 5, 6, 1, 1, 1, 1, 1, 256, 58, 40, 22,,,, +7, 5, 6, 1, 1, 1, 1, 1, 384, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 1, 1, 128, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 1, 1, 256, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 1, 1, 384, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 1, 2, 128, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 1, 2, 256, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 1, 2, 384, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 2, 1, 128, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 2, 1, 256, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 2, 1, 384, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 2, 2, 128, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 2, 2, 256, 58, 40, 22,,,, +7, 5, 6, 1, 2, 2, 2, 2, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 1, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 1, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 1, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 2, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 2, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 2, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 3, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 3, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 1, 3, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 1, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 1, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 1, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 2, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 2, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 2, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 3, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 3, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 2, 3, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 1, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 1, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 1, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 2, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 2, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 2, 384, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 3, 128, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 3, 256, 58, 40, 22,,,, +7, 5, 6, 1, 3, 3, 3, 3, 384, 58, 40, 22,,,, +7, 5, 6, 2, 1, 1, 1, 1, 128, 112, 76, 40,,,, 
+7, 5, 6, 2, 1, 1, 1, 1, 256, 112, 76, 40,,,, +7, 5, 6, 2, 1, 1, 1, 1, 384, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 1, 1, 128, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 1, 1, 256, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 1, 1, 384, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 1, 2, 128, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 1, 2, 256, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 1, 2, 384, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 2, 1, 128, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 2, 1, 256, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 2, 1, 384, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 2, 2, 128, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 2, 2, 256, 112, 76, 40,,,, +7, 5, 6, 2, 2, 2, 2, 2, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 1, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 1, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 1, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 2, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 2, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 2, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 3, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 3, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 1, 3, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 1, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 1, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 1, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 2, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 2, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 2, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 3, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 3, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 2, 3, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 1, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 1, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 1, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 2, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 2, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 2, 384, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 3, 128, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 3, 256, 112, 76, 40,,,, +7, 5, 6, 2, 3, 3, 3, 3, 384, 112, 76, 40,,,, +7, 5, 6, 3, 1, 1, 1, 1, 128, 166, 112, 58,,,, +7, 5, 6, 3, 1, 1, 1, 1, 256, 166, 112, 58,,,, +7, 5, 6, 3, 1, 1, 
1, 1, 384, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 1, 1, 128, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 1, 1, 256, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 1, 1, 384, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 1, 2, 128, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 1, 2, 256, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 1, 2, 384, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 2, 1, 128, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 2, 1, 256, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 2, 1, 384, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 2, 2, 128, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 2, 2, 256, 166, 112, 58,,,, +7, 5, 6, 3, 2, 2, 2, 2, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 1, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 1, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 1, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 2, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 2, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 2, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 3, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 3, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 1, 3, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 1, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 1, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 1, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 2, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 2, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 2, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 3, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 3, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 2, 3, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 1, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 1, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 1, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 2, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 2, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 2, 384, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 3, 128, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 3, 256, 166, 112, 58,,,, +7, 5, 6, 3, 3, 3, 3, 3, 384, 166, 112, 58,,,, +7, 5, 6, 4, 1, 1, 1, 1, 128, 220, 148, 76,,,, +7, 5, 6, 4, 1, 1, 1, 1, 256, 220, 148, 76,,,, +7, 5, 6, 4, 1, 1, 1, 1, 384, 220, 148, 
76,,,, +7, 5, 6, 4, 2, 2, 1, 1, 128, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 1, 1, 256, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 1, 1, 384, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 1, 2, 128, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 1, 2, 256, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 1, 2, 384, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 2, 1, 128, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 2, 1, 256, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 2, 1, 384, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 2, 2, 128, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 2, 2, 256, 220, 148, 76,,,, +7, 5, 6, 4, 2, 2, 2, 2, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 1, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 1, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 1, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 2, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 2, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 2, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 3, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 3, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 1, 3, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 1, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 1, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 1, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 2, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 2, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 2, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 3, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 3, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 2, 3, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 1, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 1, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 1, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 2, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 2, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 2, 384, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 3, 128, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 3, 256, 220, 148, 76,,,, +7, 5, 6, 4, 3, 3, 3, 3, 384, 220, 148, 76,,,, +7, 5, 6, 5, 1, 1, 1, 1, 128, 240, 184, 94,,,, +7, 5, 6, 5, 1, 1, 1, 1, 256, 240, 184, 94,,,, +7, 5, 6, 5, 1, 1, 1, 1, 384, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 
1, 1, 128, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 1, 1, 256, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 1, 1, 384, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 1, 2, 128, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 1, 2, 256, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 1, 2, 384, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 2, 1, 128, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 2, 1, 256, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 2, 1, 384, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 2, 2, 128, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 2, 2, 256, 240, 184, 94,,,, +7, 5, 6, 5, 2, 2, 2, 2, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 1, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 1, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 1, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 2, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 2, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 2, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 3, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 3, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 1, 3, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 1, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 1, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 1, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 2, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 2, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 2, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 3, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 3, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 2, 3, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 1, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 1, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 1, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 2, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 2, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 2, 384, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 3, 128, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 3, 256, 240, 184, 94,,,, +7, 5, 6, 5, 3, 3, 3, 3, 384, 240, 184, 94,,,, +7, 5, 7, 1, 1, 1, 1, 1, 128, 58, 40, 22,,,, +7, 5, 7, 1, 1, 1, 1, 1, 256, 58, 40, 22,,,, +7, 5, 7, 1, 1, 1, 1, 1, 384, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 1, 1, 128, 58, 40, 22,,,, +7, 
5, 7, 1, 2, 2, 1, 1, 256, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 1, 1, 384, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 1, 2, 128, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 1, 2, 256, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 1, 2, 384, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 2, 1, 128, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 2, 1, 256, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 2, 1, 384, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 2, 2, 128, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 2, 2, 256, 58, 40, 22,,,, +7, 5, 7, 1, 2, 2, 2, 2, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 1, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 1, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 1, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 2, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 2, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 2, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 3, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 3, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 1, 3, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 1, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 1, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 1, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 2, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 2, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 2, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 3, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 3, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 2, 3, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 1, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 1, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 1, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 2, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 2, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 2, 384, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 3, 128, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 3, 256, 58, 40, 22,,,, +7, 5, 7, 1, 3, 3, 3, 3, 384, 58, 40, 22,,,, +7, 5, 7, 2, 1, 1, 1, 1, 128, 112, 76, 40,,,, +7, 5, 7, 2, 1, 1, 1, 1, 256, 112, 76, 40,,,, +7, 5, 7, 2, 1, 1, 1, 1, 384, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 1, 1, 128, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 1, 1, 256, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 1, 1, 384, 112, 76, 40,,,, +7, 5, 7, 2, 2, 
2, 1, 2, 128, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 1, 2, 256, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 1, 2, 384, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 2, 1, 128, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 2, 1, 256, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 2, 1, 384, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 2, 2, 128, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 2, 2, 256, 112, 76, 40,,,, +7, 5, 7, 2, 2, 2, 2, 2, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 1, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 1, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 1, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 2, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 2, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 2, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 3, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 3, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 1, 3, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 1, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 1, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 1, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 2, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 2, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 2, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 3, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 3, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 2, 3, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 1, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 1, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 1, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 2, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 2, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 2, 384, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 3, 128, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 3, 256, 112, 76, 40,,,, +7, 5, 7, 2, 3, 3, 3, 3, 384, 112, 76, 40,,,, +7, 5, 7, 3, 1, 1, 1, 1, 128, 166, 112, 58,,,, +7, 5, 7, 3, 1, 1, 1, 1, 256, 166, 112, 58,,,, +7, 5, 7, 3, 1, 1, 1, 1, 384, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 1, 1, 128, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 1, 1, 256, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 1, 1, 384, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 1, 2, 128, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 1, 2, 256, 
166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 1, 2, 384, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 2, 1, 128, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 2, 1, 256, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 2, 1, 384, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 2, 2, 128, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 2, 2, 256, 166, 112, 58,,,, +7, 5, 7, 3, 2, 2, 2, 2, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 1, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 1, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 1, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 2, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 2, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 2, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 3, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 3, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 1, 3, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 1, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 1, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 1, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 2, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 2, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 2, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 3, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 3, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 2, 3, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 1, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 1, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 1, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 2, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 2, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 2, 384, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 3, 128, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 3, 256, 166, 112, 58,,,, +7, 5, 7, 3, 3, 3, 3, 3, 384, 166, 112, 58,,,, +7, 5, 7, 4, 1, 1, 1, 1, 128, 220, 148, 76,,,, +7, 5, 7, 4, 1, 1, 1, 1, 256, 220, 148, 76,,,, +7, 5, 7, 4, 1, 1, 1, 1, 384, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 1, 1, 128, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 1, 1, 256, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 1, 1, 384, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 1, 2, 128, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 1, 2, 256, 220, 148, 76,,,, +7, 5, 
7, 4, 2, 2, 1, 2, 384, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 2, 1, 128, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 2, 1, 256, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 2, 1, 384, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 2, 2, 128, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 2, 2, 256, 220, 148, 76,,,, +7, 5, 7, 4, 2, 2, 2, 2, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 1, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 1, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 1, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 2, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 2, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 2, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 3, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 3, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 1, 3, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 1, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 1, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 1, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 2, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 2, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 2, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 3, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 3, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 2, 3, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 1, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 1, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 1, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 2, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 2, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 2, 384, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 3, 128, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 3, 256, 220, 148, 76,,,, +7, 5, 7, 4, 3, 3, 3, 3, 384, 220, 148, 76,,,, +7, 5, 7, 5, 1, 1, 1, 1, 128, 240, 184, 94,,,, +7, 5, 7, 5, 1, 1, 1, 1, 256, 240, 184, 94,,,, +7, 5, 7, 5, 1, 1, 1, 1, 384, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 1, 1, 128, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 1, 1, 256, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 1, 1, 384, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 1, 2, 128, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 1, 2, 256, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 1, 2, 384, 
240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 2, 1, 128, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 2, 1, 256, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 2, 1, 384, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 2, 2, 128, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 2, 2, 256, 240, 184, 94,,,, +7, 5, 7, 5, 2, 2, 2, 2, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 1, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 1, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 1, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 2, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 2, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 2, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 3, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 3, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 1, 3, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 1, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 1, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 1, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 2, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 2, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 2, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 3, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 3, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 2, 3, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 1, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 1, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 1, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 2, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 2, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 2, 384, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 3, 128, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 3, 256, 240, 184, 94,,,, +7, 5, 7, 5, 3, 3, 3, 3, 384, 240, 184, 94,,,, +7, 6, 1, 1, 1, 1, 1, 1, 128, 59, 41, 23,,,, +7, 6, 1, 1, 1, 1, 1, 1, 256, 59, 41, 23,,,, +7, 6, 1, 1, 1, 1, 1, 1, 384, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 1, 1, 128, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 1, 1, 256, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 1, 1, 384, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 1, 2, 128, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 1, 2, 256, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 1, 2, 384, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 2, 1, 
128, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 2, 1, 256, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 2, 1, 384, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 2, 2, 128, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 2, 2, 256, 59, 41, 23,,,, +7, 6, 1, 1, 2, 2, 2, 2, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 1, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 1, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 1, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 2, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 2, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 2, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 3, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 3, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 1, 3, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 1, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 1, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 1, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 2, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 2, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 2, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 3, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 3, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 2, 3, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 1, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 1, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 1, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 2, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 2, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 2, 384, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 3, 128, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 3, 256, 59, 41, 23,,,, +7, 6, 1, 1, 3, 3, 3, 3, 384, 59, 41, 23,,,, +7, 6, 1, 2, 1, 1, 1, 1, 128, 113, 77, 41,,,, +7, 6, 1, 2, 1, 1, 1, 1, 256, 113, 77, 41,,,, +7, 6, 1, 2, 1, 1, 1, 1, 384, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 1, 1, 128, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 1, 1, 256, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 1, 1, 384, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 1, 2, 128, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 1, 2, 256, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 1, 2, 384, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 2, 1, 128, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 2, 1, 256, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 2, 1, 384, 
113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 2, 2, 128, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 2, 2, 256, 113, 77, 41,,,, +7, 6, 1, 2, 2, 2, 2, 2, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 1, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 1, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 1, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 2, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 2, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 2, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 3, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 3, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 1, 3, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 1, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 1, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 1, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 2, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 2, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 2, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 3, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 3, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 2, 3, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 1, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 1, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 1, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 2, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 2, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 2, 384, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 3, 128, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 3, 256, 113, 77, 41,,,, +7, 6, 1, 2, 3, 3, 3, 3, 384, 113, 77, 41,,,, +7, 6, 1, 3, 1, 1, 1, 1, 128, 167, 113, 59,,,, +7, 6, 1, 3, 1, 1, 1, 1, 256, 167, 113, 59,,,, +7, 6, 1, 3, 1, 1, 1, 1, 384, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 1, 1, 128, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 1, 1, 256, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 1, 1, 384, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 1, 2, 128, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 1, 2, 256, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 1, 2, 384, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 2, 1, 128, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 2, 1, 256, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 2, 1, 384, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 2, 2, 128, 167, 113, 
59,,,, +7, 6, 1, 3, 2, 2, 2, 2, 256, 167, 113, 59,,,, +7, 6, 1, 3, 2, 2, 2, 2, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 1, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 1, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 1, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 2, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 2, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 2, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 3, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 3, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 1, 3, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 1, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 1, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 1, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 2, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 2, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 2, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 3, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 3, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 2, 3, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 1, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 1, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 1, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 2, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 2, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 2, 384, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 3, 128, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 3, 256, 167, 113, 59,,,, +7, 6, 1, 3, 3, 3, 3, 3, 384, 167, 113, 59,,,, +7, 6, 1, 4, 1, 1, 1, 1, 128, 221, 149, 77,,,, +7, 6, 1, 4, 1, 1, 1, 1, 256, 221, 149, 77,,,, +7, 6, 1, 4, 1, 1, 1, 1, 384, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 1, 1, 128, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 1, 1, 256, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 1, 1, 384, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 1, 2, 128, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 1, 2, 256, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 1, 2, 384, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 2, 1, 128, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 2, 1, 256, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 2, 1, 384, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 2, 2, 128, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 
2, 2, 256, 221, 149, 77,,,, +7, 6, 1, 4, 2, 2, 2, 2, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 1, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 1, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 1, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 2, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 2, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 2, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 3, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 3, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 1, 3, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 1, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 1, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 1, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 2, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 2, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 2, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 3, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 3, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 2, 3, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 1, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 1, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 1, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 2, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 2, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 2, 384, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 3, 128, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 3, 256, 221, 149, 77,,,, +7, 6, 1, 4, 3, 3, 3, 3, 384, 221, 149, 77,,,, +7, 6, 1, 5, 1, 1, 1, 1, 128, 240, 185, 95,,,, +7, 6, 1, 5, 1, 1, 1, 1, 256, 240, 185, 95,,,, +7, 6, 1, 5, 1, 1, 1, 1, 384, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 1, 1, 128, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 1, 1, 256, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 1, 1, 384, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 1, 2, 128, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 1, 2, 256, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 1, 2, 384, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 2, 1, 128, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 2, 1, 256, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 2, 1, 384, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 2, 2, 128, 240, 185, 95,,,, +7, 6, 1, 5, 2, 2, 2, 2, 256, 240, 185, 
95,,,, +7, 6, 1, 5, 2, 2, 2, 2, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 1, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 1, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 1, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 2, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 2, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 2, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 3, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 3, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 1, 3, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 1, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 1, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 1, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 2, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 2, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 2, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 3, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 3, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 2, 3, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 1, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 1, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 1, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 2, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 2, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 2, 384, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 3, 128, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 3, 256, 240, 185, 95,,,, +7, 6, 1, 5, 3, 3, 3, 3, 384, 240, 185, 95,,,, +7, 6, 1, 6, 1, 1, 1, 1, 128, 240, 221, 113,,,, +7, 6, 1, 6, 1, 1, 1, 1, 256, 240, 221, 113,,,, +7, 6, 1, 6, 1, 1, 1, 1, 384, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 1, 1, 128, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 1, 1, 256, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 1, 1, 384, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 1, 2, 128, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 1, 2, 256, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 1, 2, 384, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 2, 1, 128, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 2, 1, 256, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 2, 1, 384, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 2, 2, 128, 240, 221, 113,,,, +7, 6, 1, 6, 2, 2, 2, 2, 256, 240, 221, 113,,,, +7, 
6, 1, 6, 2, 2, 2, 2, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 1, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 1, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 1, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 2, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 2, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 2, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 3, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 3, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 1, 3, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 1, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 1, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 1, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 2, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 2, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 2, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 3, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 3, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 2, 3, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 1, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 1, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 1, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 2, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 2, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 2, 384, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 3, 128, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 3, 256, 240, 221, 113,,,, +7, 6, 1, 6, 3, 3, 3, 3, 384, 240, 221, 113,,,, +7, 6, 2, 1, 1, 1, 1, 1, 128, 59, 41, 23,,,, +7, 6, 2, 1, 1, 1, 1, 1, 256, 59, 41, 23,,,, +7, 6, 2, 1, 1, 1, 1, 1, 384, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 1, 1, 128, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 1, 1, 256, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 1, 1, 384, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 1, 2, 128, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 1, 2, 256, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 1, 2, 384, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 2, 1, 128, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 2, 1, 256, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 2, 1, 384, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 2, 2, 128, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 2, 2, 256, 59, 41, 23,,,, +7, 6, 2, 1, 2, 2, 2, 2, 384, 
59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 1, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 1, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 1, 384, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 2, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 2, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 2, 384, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 3, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 3, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 1, 3, 384, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 1, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 1, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 1, 384, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 2, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 2, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 2, 384, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 3, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 3, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 2, 3, 384, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 1, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 1, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 1, 384, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 2, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 2, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 2, 384, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 3, 128, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 3, 256, 59, 41, 23,,,, +7, 6, 2, 1, 3, 3, 3, 3, 384, 59, 41, 23,,,, +7, 6, 2, 2, 1, 1, 1, 1, 128, 113, 77, 41,,,, +7, 6, 2, 2, 1, 1, 1, 1, 256, 113, 77, 41,,,, +7, 6, 2, 2, 1, 1, 1, 1, 384, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 1, 1, 128, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 1, 1, 256, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 1, 1, 384, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 1, 2, 128, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 1, 2, 256, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 1, 2, 384, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 2, 1, 128, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 2, 1, 256, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 2, 1, 384, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 2, 2, 128, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 2, 2, 256, 113, 77, 41,,,, +7, 6, 2, 2, 2, 2, 2, 2, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 1, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 1, 256, 
113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 1, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 2, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 2, 256, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 2, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 3, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 3, 256, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 1, 3, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 1, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 1, 256, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 1, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 2, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 2, 256, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 2, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 3, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 3, 256, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 2, 3, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 1, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 1, 256, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 1, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 2, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 2, 256, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 2, 384, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 3, 128, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 3, 256, 113, 77, 41,,,, +7, 6, 2, 2, 3, 3, 3, 3, 384, 113, 77, 41,,,, +7, 6, 2, 3, 1, 1, 1, 1, 128, 167, 113, 59,,,, +7, 6, 2, 3, 1, 1, 1, 1, 256, 167, 113, 59,,,, +7, 6, 2, 3, 1, 1, 1, 1, 384, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 1, 1, 128, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 1, 1, 256, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 1, 1, 384, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 1, 2, 128, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 1, 2, 256, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 1, 2, 384, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 2, 1, 128, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 2, 1, 256, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 2, 1, 384, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 2, 2, 128, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 2, 2, 256, 167, 113, 59,,,, +7, 6, 2, 3, 2, 2, 2, 2, 384, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 1, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 1, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 1, 384, 167, 
113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 2, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 2, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 2, 384, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 3, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 3, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 1, 3, 384, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 1, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 1, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 1, 384, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 2, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 2, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 2, 384, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 3, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 3, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 2, 3, 384, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 1, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 1, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 1, 384, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 2, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 2, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 2, 384, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 3, 128, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 3, 256, 167, 113, 59,,,, +7, 6, 2, 3, 3, 3, 3, 3, 384, 167, 113, 59,,,, +7, 6, 2, 4, 1, 1, 1, 1, 128, 221, 149, 77,,,, +7, 6, 2, 4, 1, 1, 1, 1, 256, 221, 149, 77,,,, +7, 6, 2, 4, 1, 1, 1, 1, 384, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 1, 1, 128, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 1, 1, 256, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 1, 1, 384, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 1, 2, 128, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 1, 2, 256, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 1, 2, 384, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 2, 1, 128, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 2, 1, 256, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 2, 1, 384, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 2, 2, 128, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 2, 2, 256, 221, 149, 77,,,, +7, 6, 2, 4, 2, 2, 2, 2, 384, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 1, 1, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 1, 1, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 1, 1, 384, 221, 149, 77,,,, +7, 6, 2, 4, 
3, 3, 1, 2, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 1, 2, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 1, 2, 384, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 1, 3, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 1, 3, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 1, 3, 384, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 1, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 1, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 1, 384, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 2, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 2, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 2, 384, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 3, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 3, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 2, 3, 384, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 1, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 1, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 1, 384, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 2, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 2, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 2, 384, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 3, 128, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 3, 256, 221, 149, 77,,,, +7, 6, 2, 4, 3, 3, 3, 3, 384, 221, 149, 77,,,, +7, 6, 2, 5, 1, 1, 1, 1, 128, 240, 185, 95,,,, +7, 6, 2, 5, 1, 1, 1, 1, 256, 240, 185, 95,,,, +7, 6, 2, 5, 1, 1, 1, 1, 384, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 1, 1, 128, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 1, 1, 256, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 1, 1, 384, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 1, 2, 128, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 1, 2, 256, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 1, 2, 384, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 2, 1, 128, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 2, 1, 256, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 2, 1, 384, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 2, 2, 128, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 2, 2, 256, 240, 185, 95,,,, +7, 6, 2, 5, 2, 2, 2, 2, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 1, 128, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 1, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 1, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 2, 128, 240, 
185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 2, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 2, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 3, 128, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 3, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 1, 3, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 1, 128, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 1, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 1, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 2, 128, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 2, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 2, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 3, 128, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 3, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 2, 3, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 1, 128, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 1, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 1, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 2, 128, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 2, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 2, 384, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 3, 128, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 3, 256, 240, 185, 95,,,, +7, 6, 2, 5, 3, 3, 3, 3, 384, 240, 185, 95,,,, +7, 6, 2, 6, 1, 1, 1, 1, 128, 240, 221, 113,,,, +7, 6, 2, 6, 1, 1, 1, 1, 256, 240, 221, 113,,,, +7, 6, 2, 6, 1, 1, 1, 1, 384, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 1, 1, 128, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 1, 1, 256, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 1, 1, 384, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 1, 2, 128, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 1, 2, 256, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 1, 2, 384, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 2, 1, 128, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 2, 1, 256, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 2, 1, 384, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 2, 2, 128, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 2, 2, 256, 240, 221, 113,,,, +7, 6, 2, 6, 2, 2, 2, 2, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 1, 1, 128, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 1, 1, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 1, 1, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 1, 2, 128, 240, 221, 
113,,,, +7, 6, 2, 6, 3, 3, 1, 2, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 1, 2, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 1, 3, 128, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 1, 3, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 1, 3, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 1, 128, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 1, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 1, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 2, 128, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 2, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 2, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 3, 128, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 3, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 2, 3, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 1, 128, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 1, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 1, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 2, 128, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 2, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 2, 384, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 3, 128, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 3, 256, 240, 221, 113,,,, +7, 6, 2, 6, 3, 3, 3, 3, 384, 240, 221, 113,,,, +7, 6, 3, 1, 1, 1, 1, 1, 128, 59, 41, 23,,,, +7, 6, 3, 1, 1, 1, 1, 1, 256, 59, 41, 23,,,, +7, 6, 3, 1, 1, 1, 1, 1, 384, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 1, 1, 128, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 1, 1, 256, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 1, 1, 384, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 1, 2, 128, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 1, 2, 256, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 1, 2, 384, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 2, 1, 128, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 2, 1, 256, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 2, 1, 384, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 2, 2, 128, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 2, 2, 256, 59, 41, 23,,,, +7, 6, 3, 1, 2, 2, 2, 2, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 1, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 1, 256, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 1, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 2, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 2, 256, 
59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 2, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 3, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 3, 256, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 1, 3, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 1, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 1, 256, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 1, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 2, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 2, 256, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 2, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 3, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 3, 256, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 2, 3, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 1, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 1, 256, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 1, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 2, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 2, 256, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 2, 384, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 3, 128, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 3, 256, 59, 41, 23,,,, +7, 6, 3, 1, 3, 3, 3, 3, 384, 59, 41, 23,,,, +7, 6, 3, 2, 1, 1, 1, 1, 128, 113, 77, 41,,,, +7, 6, 3, 2, 1, 1, 1, 1, 256, 113, 77, 41,,,, +7, 6, 3, 2, 1, 1, 1, 1, 384, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 1, 1, 128, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 1, 1, 256, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 1, 1, 384, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 1, 2, 128, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 1, 2, 256, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 1, 2, 384, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 2, 1, 128, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 2, 1, 256, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 2, 1, 384, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 2, 2, 128, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 2, 2, 256, 113, 77, 41,,,, +7, 6, 3, 2, 2, 2, 2, 2, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 1, 128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 1, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 1, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 2, 128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 2, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 2, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 3, 
128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 3, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 1, 3, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 1, 128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 1, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 1, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 2, 128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 2, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 2, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 3, 128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 3, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 2, 3, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 1, 128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 1, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 1, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 2, 128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 2, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 2, 384, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 3, 128, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 3, 256, 113, 77, 41,,,, +7, 6, 3, 2, 3, 3, 3, 3, 384, 113, 77, 41,,,, +7, 6, 3, 3, 1, 1, 1, 1, 128, 167, 113, 59,,,, +7, 6, 3, 3, 1, 1, 1, 1, 256, 167, 113, 59,,,, +7, 6, 3, 3, 1, 1, 1, 1, 384, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 1, 1, 128, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 1, 1, 256, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 1, 1, 384, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 1, 2, 128, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 1, 2, 256, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 1, 2, 384, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 2, 1, 128, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 2, 1, 256, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 2, 1, 384, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 2, 2, 128, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 2, 2, 256, 167, 113, 59,,,, +7, 6, 3, 3, 2, 2, 2, 2, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 1, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 1, 256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 1, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 2, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 2, 256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 2, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 3, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 3, 
256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 1, 3, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 1, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 1, 256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 1, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 2, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 2, 256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 2, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 3, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 3, 256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 2, 3, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 1, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 1, 256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 1, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 2, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 2, 256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 2, 384, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 3, 128, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 3, 256, 167, 113, 59,,,, +7, 6, 3, 3, 3, 3, 3, 3, 384, 167, 113, 59,,,, +7, 6, 3, 4, 1, 1, 1, 1, 128, 221, 149, 77,,,, +7, 6, 3, 4, 1, 1, 1, 1, 256, 221, 149, 77,,,, +7, 6, 3, 4, 1, 1, 1, 1, 384, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 1, 1, 128, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 1, 1, 256, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 1, 1, 384, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 1, 2, 128, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 1, 2, 256, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 1, 2, 384, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 2, 1, 128, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 2, 1, 256, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 2, 1, 384, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 2, 2, 128, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 2, 2, 256, 221, 149, 77,,,, +7, 6, 3, 4, 2, 2, 2, 2, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 1, 1, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 1, 1, 256, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 1, 1, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 1, 2, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 1, 2, 256, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 1, 2, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 1, 3, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 1, 3, 256, 221, 149, 77,,,, +7, 
6, 3, 4, 3, 3, 1, 3, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 1, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 1, 256, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 1, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 2, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 2, 256, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 2, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 3, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 3, 256, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 2, 3, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 1, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 1, 256, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 1, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 2, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 2, 256, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 2, 384, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 3, 128, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 3, 256, 221, 149, 77,,,, +7, 6, 3, 4, 3, 3, 3, 3, 384, 221, 149, 77,,,, +7, 6, 3, 5, 1, 1, 1, 1, 128, 240, 185, 95,,,, +7, 6, 3, 5, 1, 1, 1, 1, 256, 240, 185, 95,,,, +7, 6, 3, 5, 1, 1, 1, 1, 384, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 1, 1, 128, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 1, 1, 256, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 1, 1, 384, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 1, 2, 128, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 1, 2, 256, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 1, 2, 384, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 2, 1, 128, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 2, 1, 256, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 2, 1, 384, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 2, 2, 128, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 2, 2, 256, 240, 185, 95,,,, +7, 6, 3, 5, 2, 2, 2, 2, 384, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 1, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 1, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 1, 384, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 2, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 2, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 2, 384, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 3, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 3, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 1, 3, 384, 
240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 1, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 1, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 1, 384, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 2, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 2, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 2, 384, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 3, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 3, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 2, 3, 384, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 1, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 1, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 1, 384, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 2, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 2, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 2, 384, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 3, 128, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 3, 256, 240, 185, 95,,,, +7, 6, 3, 5, 3, 3, 3, 3, 384, 240, 185, 95,,,, +7, 6, 3, 6, 1, 1, 1, 1, 128, 240, 221, 113,,,, +7, 6, 3, 6, 1, 1, 1, 1, 256, 240, 221, 113,,,, +7, 6, 3, 6, 1, 1, 1, 1, 384, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 1, 1, 128, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 1, 1, 256, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 1, 1, 384, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 1, 2, 128, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 1, 2, 256, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 1, 2, 384, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 2, 1, 128, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 2, 1, 256, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 2, 1, 384, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 2, 2, 128, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 2, 2, 256, 240, 221, 113,,,, +7, 6, 3, 6, 2, 2, 2, 2, 384, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 1, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 1, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 1, 384, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 2, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 2, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 2, 384, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 3, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 3, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 1, 3, 384, 
240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 1, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 1, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 1, 384, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 2, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 2, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 2, 384, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 3, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 3, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 2, 3, 384, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 1, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 1, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 1, 384, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 2, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 2, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 2, 384, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 3, 128, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 3, 256, 240, 221, 113,,,, +7, 6, 3, 6, 3, 3, 3, 3, 384, 240, 221, 113,,,, +7, 6, 4, 1, 1, 1, 1, 1, 128, 59, 41, 23,,,, +7, 6, 4, 1, 1, 1, 1, 1, 256, 59, 41, 23,,,, +7, 6, 4, 1, 1, 1, 1, 1, 384, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 1, 1, 128, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 1, 1, 256, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 1, 1, 384, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 1, 2, 128, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 1, 2, 256, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 1, 2, 384, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 2, 1, 128, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 2, 1, 256, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 2, 1, 384, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 2, 2, 128, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 2, 2, 256, 59, 41, 23,,,, +7, 6, 4, 1, 2, 2, 2, 2, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 1, 128, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 1, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 1, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 2, 128, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 2, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 2, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 3, 128, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 3, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 1, 3, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 2, 1, 128, 59, 41, 
23,,,, +7, 6, 4, 1, 3, 3, 2, 1, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 2, 1, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 2, 2, 128, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 2, 2, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 2, 2, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 2, 3, 128, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 2, 3, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 2, 3, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 1, 128, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 1, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 1, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 2, 128, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 2, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 2, 384, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 3, 128, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 3, 256, 59, 41, 23,,,, +7, 6, 4, 1, 3, 3, 3, 3, 384, 59, 41, 23,,,, +7, 6, 4, 2, 1, 1, 1, 1, 128, 113, 77, 41,,,, +7, 6, 4, 2, 1, 1, 1, 1, 256, 113, 77, 41,,,, +7, 6, 4, 2, 1, 1, 1, 1, 384, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 1, 1, 128, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 1, 1, 256, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 1, 1, 384, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 1, 2, 128, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 1, 2, 256, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 1, 2, 384, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 2, 1, 128, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 2, 1, 256, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 2, 1, 384, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 2, 2, 128, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 2, 2, 256, 113, 77, 41,,,, +7, 6, 4, 2, 2, 2, 2, 2, 384, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 1, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 1, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 1, 384, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 2, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 2, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 2, 384, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 3, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 3, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 1, 3, 384, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 1, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 1, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 1, 384, 
113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 2, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 2, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 2, 384, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 3, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 3, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 2, 3, 384, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 1, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 1, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 1, 384, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 2, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 2, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 2, 384, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 3, 128, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 3, 256, 113, 77, 41,,,, +7, 6, 4, 2, 3, 3, 3, 3, 384, 113, 77, 41,,,, +7, 6, 4, 3, 1, 1, 1, 1, 128, 167, 113, 59,,,, +7, 6, 4, 3, 1, 1, 1, 1, 256, 167, 113, 59,,,, +7, 6, 4, 3, 1, 1, 1, 1, 384, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 1, 1, 128, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 1, 1, 256, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 1, 1, 384, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 1, 2, 128, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 1, 2, 256, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 1, 2, 384, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 2, 1, 128, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 2, 1, 256, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 2, 1, 384, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 2, 2, 128, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 2, 2, 256, 167, 113, 59,,,, +7, 6, 4, 3, 2, 2, 2, 2, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 1, 128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 1, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 1, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 2, 128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 2, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 2, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 3, 128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 3, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 1, 3, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 1, 128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 1, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 1, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 2, 
128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 2, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 2, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 3, 128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 3, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 2, 3, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 1, 128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 1, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 1, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 2, 128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 2, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 2, 384, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 3, 128, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 3, 256, 167, 113, 59,,,, +7, 6, 4, 3, 3, 3, 3, 3, 384, 167, 113, 59,,,, +7, 6, 4, 4, 1, 1, 1, 1, 128, 221, 149, 77,,,, +7, 6, 4, 4, 1, 1, 1, 1, 256, 221, 149, 77,,,, +7, 6, 4, 4, 1, 1, 1, 1, 384, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 1, 1, 128, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 1, 1, 256, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 1, 1, 384, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 1, 2, 128, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 1, 2, 256, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 1, 2, 384, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 2, 1, 128, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 2, 1, 256, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 2, 1, 384, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 2, 2, 128, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 2, 2, 256, 221, 149, 77,,,, +7, 6, 4, 4, 2, 2, 2, 2, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 1, 128, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 1, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 1, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 2, 128, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 2, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 2, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 3, 128, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 3, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 1, 3, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 2, 1, 128, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 2, 1, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 2, 1, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 2, 2, 128, 221, 149, 77,,,, +7, 
6, 4, 4, 3, 3, 2, 2, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 2, 2, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 2, 3, 128, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 2, 3, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 2, 3, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 1, 128, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 1, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 1, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 2, 128, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 2, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 2, 384, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 3, 128, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 3, 256, 221, 149, 77,,,, +7, 6, 4, 4, 3, 3, 3, 3, 384, 221, 149, 77,,,, +7, 6, 4, 5, 1, 1, 1, 1, 128, 240, 185, 95,,,, +7, 6, 4, 5, 1, 1, 1, 1, 256, 240, 185, 95,,,, +7, 6, 4, 5, 1, 1, 1, 1, 384, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 1, 1, 128, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 1, 1, 256, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 1, 1, 384, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 1, 2, 128, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 1, 2, 256, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 1, 2, 384, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 2, 1, 128, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 2, 1, 256, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 2, 1, 384, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 2, 2, 128, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 2, 2, 256, 240, 185, 95,,,, +7, 6, 4, 5, 2, 2, 2, 2, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 1, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 1, 256, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 1, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 2, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 2, 256, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 2, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 3, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 3, 256, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 1, 3, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 1, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 1, 256, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 1, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 2, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 2, 256, 
240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 2, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 3, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 3, 256, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 2, 3, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 1, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 1, 256, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 1, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 2, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 2, 256, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 2, 384, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 3, 128, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 3, 256, 240, 185, 95,,,, +7, 6, 4, 5, 3, 3, 3, 3, 384, 240, 185, 95,,,, +7, 6, 4, 6, 1, 1, 1, 1, 128, 240, 221, 113,,,, +7, 6, 4, 6, 1, 1, 1, 1, 256, 240, 221, 113,,,, +7, 6, 4, 6, 1, 1, 1, 1, 384, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 1, 1, 128, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 1, 1, 256, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 1, 1, 384, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 1, 2, 128, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 1, 2, 256, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 1, 2, 384, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 2, 1, 128, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 2, 1, 256, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 2, 1, 384, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 2, 2, 128, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 2, 2, 256, 240, 221, 113,,,, +7, 6, 4, 6, 2, 2, 2, 2, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 1, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 1, 256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 1, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 2, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 2, 256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 2, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 3, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 3, 256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 1, 3, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 1, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 1, 256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 1, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 2, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 2, 
256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 2, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 3, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 3, 256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 2, 3, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 1, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 1, 256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 1, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 2, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 2, 256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 2, 384, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 3, 128, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 3, 256, 240, 221, 113,,,, +7, 6, 4, 6, 3, 3, 3, 3, 384, 240, 221, 113,,,, +7, 6, 5, 1, 1, 1, 1, 1, 128, 59, 41, 23,,,, +7, 6, 5, 1, 1, 1, 1, 1, 256, 59, 41, 23,,,, +7, 6, 5, 1, 1, 1, 1, 1, 384, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 1, 1, 128, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 1, 1, 256, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 1, 1, 384, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 1, 2, 128, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 1, 2, 256, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 1, 2, 384, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 2, 1, 128, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 2, 1, 256, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 2, 1, 384, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 2, 2, 128, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 2, 2, 256, 59, 41, 23,,,, +7, 6, 5, 1, 2, 2, 2, 2, 384, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 1, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 1, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 1, 384, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 2, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 2, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 2, 384, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 3, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 3, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 1, 3, 384, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 2, 1, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 2, 1, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 2, 1, 384, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 2, 2, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 2, 2, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 2, 2, 384, 59, 41, 23,,,, 
+7, 6, 5, 1, 3, 3, 2, 3, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 2, 3, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 2, 3, 384, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 1, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 1, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 1, 384, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 2, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 2, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 2, 384, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 3, 128, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 3, 256, 59, 41, 23,,,, +7, 6, 5, 1, 3, 3, 3, 3, 384, 59, 41, 23,,,, +7, 6, 5, 2, 1, 1, 1, 1, 128, 113, 77, 41,,,, +7, 6, 5, 2, 1, 1, 1, 1, 256, 113, 77, 41,,,, +7, 6, 5, 2, 1, 1, 1, 1, 384, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 1, 1, 128, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 1, 1, 256, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 1, 1, 384, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 1, 2, 128, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 1, 2, 256, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 1, 2, 384, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 2, 1, 128, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 2, 1, 256, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 2, 1, 384, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 2, 2, 128, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 2, 2, 256, 113, 77, 41,,,, +7, 6, 5, 2, 2, 2, 2, 2, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 1, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 1, 256, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 1, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 2, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 2, 256, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 2, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 3, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 3, 256, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 1, 3, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 1, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 1, 256, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 1, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 2, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 2, 256, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 2, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 3, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 3, 256, 
113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 2, 3, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 1, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 1, 256, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 1, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 2, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 2, 256, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 2, 384, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 3, 128, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 3, 256, 113, 77, 41,,,, +7, 6, 5, 2, 3, 3, 3, 3, 384, 113, 77, 41,,,, +7, 6, 5, 3, 1, 1, 1, 1, 128, 167, 113, 59,,,, +7, 6, 5, 3, 1, 1, 1, 1, 256, 167, 113, 59,,,, +7, 6, 5, 3, 1, 1, 1, 1, 384, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 1, 1, 128, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 1, 1, 256, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 1, 1, 384, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 1, 2, 128, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 1, 2, 256, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 1, 2, 384, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 2, 1, 128, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 2, 1, 256, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 2, 1, 384, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 2, 2, 128, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 2, 2, 256, 167, 113, 59,,,, +7, 6, 5, 3, 2, 2, 2, 2, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 1, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 1, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 1, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 2, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 2, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 2, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 3, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 3, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 1, 3, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 2, 1, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 2, 1, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 2, 1, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 2, 2, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 2, 2, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 2, 2, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 2, 3, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 2, 3, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 
2, 3, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 1, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 1, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 1, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 2, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 2, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 2, 384, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 3, 128, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 3, 256, 167, 113, 59,,,, +7, 6, 5, 3, 3, 3, 3, 3, 384, 167, 113, 59,,,, +7, 6, 5, 4, 1, 1, 1, 1, 128, 221, 149, 77,,,, +7, 6, 5, 4, 1, 1, 1, 1, 256, 221, 149, 77,,,, +7, 6, 5, 4, 1, 1, 1, 1, 384, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 1, 1, 128, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 1, 1, 256, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 1, 1, 384, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 1, 2, 128, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 1, 2, 256, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 1, 2, 384, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 2, 1, 128, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 2, 1, 256, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 2, 1, 384, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 2, 2, 128, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 2, 2, 256, 221, 149, 77,,,, +7, 6, 5, 4, 2, 2, 2, 2, 384, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 1, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 1, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 1, 384, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 2, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 2, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 2, 384, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 3, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 3, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 1, 3, 384, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 1, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 1, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 1, 384, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 2, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 2, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 2, 384, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 3, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 3, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 2, 3, 384, 221, 149, 
77,,,, +7, 6, 5, 4, 3, 3, 3, 1, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 3, 1, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 3, 1, 384, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 3, 2, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 3, 2, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 3, 2, 384, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 3, 3, 128, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 3, 3, 256, 221, 149, 77,,,, +7, 6, 5, 4, 3, 3, 3, 3, 384, 221, 149, 77,,,, +7, 6, 5, 5, 1, 1, 1, 1, 128, 240, 185, 95,,,, +7, 6, 5, 5, 1, 1, 1, 1, 256, 240, 185, 95,,,, +7, 6, 5, 5, 1, 1, 1, 1, 384, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 1, 1, 128, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 1, 1, 256, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 1, 1, 384, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 1, 2, 128, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 1, 2, 256, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 1, 2, 384, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 2, 1, 128, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 2, 1, 256, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 2, 1, 384, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 2, 2, 128, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 2, 2, 256, 240, 185, 95,,,, +7, 6, 5, 5, 2, 2, 2, 2, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 1, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 1, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 1, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 2, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 2, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 2, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 3, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 3, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 1, 3, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 1, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 1, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 1, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 2, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 2, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 2, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 3, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 3, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 2, 3, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 
3, 1, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 3, 1, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 3, 1, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 3, 2, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 3, 2, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 3, 2, 384, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 3, 3, 128, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 3, 3, 256, 240, 185, 95,,,, +7, 6, 5, 5, 3, 3, 3, 3, 384, 240, 185, 95,,,, +7, 6, 5, 6, 1, 1, 1, 1, 128, 240, 221, 113,,,, +7, 6, 5, 6, 1, 1, 1, 1, 256, 240, 221, 113,,,, +7, 6, 5, 6, 1, 1, 1, 1, 384, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 1, 1, 128, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 1, 1, 256, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 1, 1, 384, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 1, 2, 128, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 1, 2, 256, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 1, 2, 384, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 2, 1, 128, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 2, 1, 256, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 2, 1, 384, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 2, 2, 128, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 2, 2, 256, 240, 221, 113,,,, +7, 6, 5, 6, 2, 2, 2, 2, 384, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 1, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 1, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 1, 384, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 2, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 2, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 2, 384, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 3, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 3, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 1, 3, 384, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 1, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 1, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 1, 384, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 2, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 2, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 2, 384, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 3, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 3, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 2, 3, 384, 240, 221, 113,,,, +7, 6, 5, 
6, 3, 3, 3, 1, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 3, 1, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 3, 1, 384, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 3, 2, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 3, 2, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 3, 2, 384, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 3, 3, 128, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 3, 3, 256, 240, 221, 113,,,, +7, 6, 5, 6, 3, 3, 3, 3, 384, 240, 221, 113,,,, +7, 6, 6, 1, 1, 1, 1, 1, 128, 59, 41, 23,,,, +7, 6, 6, 1, 1, 1, 1, 1, 256, 59, 41, 23,,,, +7, 6, 6, 1, 1, 1, 1, 1, 384, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 1, 1, 128, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 1, 1, 256, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 1, 1, 384, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 1, 2, 128, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 1, 2, 256, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 1, 2, 384, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 2, 1, 128, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 2, 1, 256, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 2, 1, 384, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 2, 2, 128, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 2, 2, 256, 59, 41, 23,,,, +7, 6, 6, 1, 2, 2, 2, 2, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 1, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 1, 256, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 1, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 2, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 2, 256, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 2, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 3, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 3, 256, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 1, 3, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 1, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 1, 256, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 1, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 2, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 2, 256, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 2, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 3, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 3, 256, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 2, 3, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 3, 1, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 3, 1, 256, 59, 41, 23,,,, 
+7, 6, 6, 1, 3, 3, 3, 1, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 3, 2, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 3, 2, 256, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 3, 2, 384, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 3, 3, 128, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 3, 3, 256, 59, 41, 23,,,, +7, 6, 6, 1, 3, 3, 3, 3, 384, 59, 41, 23,,,, +7, 6, 6, 2, 1, 1, 1, 1, 128, 113, 77, 41,,,, +7, 6, 6, 2, 1, 1, 1, 1, 256, 113, 77, 41,,,, +7, 6, 6, 2, 1, 1, 1, 1, 384, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 1, 1, 128, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 1, 1, 256, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 1, 1, 384, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 1, 2, 128, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 1, 2, 256, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 1, 2, 384, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 2, 1, 128, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 2, 1, 256, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 2, 1, 384, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 2, 2, 128, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 2, 2, 256, 113, 77, 41,,,, +7, 6, 6, 2, 2, 2, 2, 2, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 1, 128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 1, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 1, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 2, 128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 2, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 2, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 3, 128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 3, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 1, 3, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 1, 128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 1, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 1, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 2, 128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 2, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 2, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 3, 128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 3, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 2, 3, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 1, 128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 1, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 1, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 2, 
128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 2, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 2, 384, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 3, 128, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 3, 256, 113, 77, 41,,,, +7, 6, 6, 2, 3, 3, 3, 3, 384, 113, 77, 41,,,, +7, 6, 6, 3, 1, 1, 1, 1, 128, 167, 113, 59,,,, +7, 6, 6, 3, 1, 1, 1, 1, 256, 167, 113, 59,,,, +7, 6, 6, 3, 1, 1, 1, 1, 384, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 1, 1, 128, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 1, 1, 256, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 1, 1, 384, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 1, 2, 128, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 1, 2, 256, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 1, 2, 384, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 2, 1, 128, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 2, 1, 256, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 2, 1, 384, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 2, 2, 128, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 2, 2, 256, 167, 113, 59,,,, +7, 6, 6, 3, 2, 2, 2, 2, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 1, 128, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 1, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 1, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 2, 128, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 2, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 2, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 3, 128, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 3, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 1, 3, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 1, 128, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 1, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 1, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 2, 128, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 2, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 2, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 3, 128, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 3, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 2, 3, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 3, 1, 128, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 3, 1, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 3, 1, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 3, 2, 128, 167, 113, 59,,,, +7, 6, 6, 
3, 3, 3, 3, 2, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 3, 2, 384, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 3, 3, 128, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 3, 3, 256, 167, 113, 59,,,, +7, 6, 6, 3, 3, 3, 3, 3, 384, 167, 113, 59,,,, +7, 6, 6, 4, 1, 1, 1, 1, 128, 221, 149, 77,,,, +7, 6, 6, 4, 1, 1, 1, 1, 256, 221, 149, 77,,,, +7, 6, 6, 4, 1, 1, 1, 1, 384, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 1, 1, 128, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 1, 1, 256, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 1, 1, 384, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 1, 2, 128, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 1, 2, 256, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 1, 2, 384, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 2, 1, 128, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 2, 1, 256, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 2, 1, 384, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 2, 2, 128, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 2, 2, 256, 221, 149, 77,,,, +7, 6, 6, 4, 2, 2, 2, 2, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 1, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 1, 256, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 1, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 2, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 2, 256, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 2, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 3, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 3, 256, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 1, 3, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 1, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 1, 256, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 1, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 2, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 2, 256, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 2, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 3, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 3, 256, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 2, 3, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 1, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 1, 256, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 1, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 2, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 2, 256, 221, 
149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 2, 384, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 3, 128, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 3, 256, 221, 149, 77,,,, +7, 6, 6, 4, 3, 3, 3, 3, 384, 221, 149, 77,,,, +7, 6, 6, 5, 1, 1, 1, 1, 128, 240, 185, 95,,,, +7, 6, 6, 5, 1, 1, 1, 1, 256, 240, 185, 95,,,, +7, 6, 6, 5, 1, 1, 1, 1, 384, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 1, 1, 128, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 1, 1, 256, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 1, 1, 384, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 1, 2, 128, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 1, 2, 256, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 1, 2, 384, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 2, 1, 128, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 2, 1, 256, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 2, 1, 384, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 2, 2, 128, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 2, 2, 256, 240, 185, 95,,,, +7, 6, 6, 5, 2, 2, 2, 2, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 1, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 1, 256, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 1, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 2, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 2, 256, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 2, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 3, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 3, 256, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 1, 3, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 1, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 1, 256, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 1, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 2, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 2, 256, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 2, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 3, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 3, 256, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 2, 3, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 3, 1, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 3, 1, 256, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 3, 1, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 3, 2, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 3, 2, 256, 240, 185, 95,,,, +7, 6, 6, 5, 
3, 3, 3, 2, 384, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 3, 3, 128, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 3, 3, 256, 240, 185, 95,,,, +7, 6, 6, 5, 3, 3, 3, 3, 384, 240, 185, 95,,,, +7, 6, 6, 6, 1, 1, 1, 1, 128, 240, 221, 113,,,, +7, 6, 6, 6, 1, 1, 1, 1, 256, 240, 221, 113,,,, +7, 6, 6, 6, 1, 1, 1, 1, 384, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 1, 1, 128, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 1, 1, 256, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 1, 1, 384, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 1, 2, 128, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 1, 2, 256, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 1, 2, 384, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 2, 1, 128, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 2, 1, 256, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 2, 1, 384, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 2, 2, 128, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 2, 2, 256, 240, 221, 113,,,, +7, 6, 6, 6, 2, 2, 2, 2, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 1, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 1, 256, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 1, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 2, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 2, 256, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 2, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 3, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 3, 256, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 1, 3, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 1, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 1, 256, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 1, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 2, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 2, 256, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 2, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 3, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 3, 256, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 2, 3, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 3, 1, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 3, 1, 256, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 3, 1, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 3, 2, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 3, 2, 256, 240, 221, 113,,,, 
+7, 6, 6, 6, 3, 3, 3, 2, 384, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 3, 3, 128, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 3, 3, 256, 240, 221, 113,,,, +7, 6, 6, 6, 3, 3, 3, 3, 384, 240, 221, 113,,,, +7, 6, 7, 1, 1, 1, 1, 1, 128, 59, 41, 23,,,, +7, 6, 7, 1, 1, 1, 1, 1, 256, 59, 41, 23,,,, +7, 6, 7, 1, 1, 1, 1, 1, 384, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 1, 1, 128, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 1, 1, 256, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 1, 1, 384, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 1, 2, 128, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 1, 2, 256, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 1, 2, 384, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 2, 1, 128, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 2, 1, 256, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 2, 1, 384, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 2, 2, 128, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 2, 2, 256, 59, 41, 23,,,, +7, 6, 7, 1, 2, 2, 2, 2, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 1, 128, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 1, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 1, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 2, 128, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 2, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 2, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 3, 128, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 3, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 1, 3, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 1, 128, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 1, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 1, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 2, 128, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 2, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 2, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 3, 128, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 3, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 2, 3, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 3, 1, 128, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 3, 1, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 3, 1, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 3, 2, 128, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 3, 2, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 3, 2, 384, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 3, 3, 128, 59, 41, 23,,,, +7, 6, 
7, 1, 3, 3, 3, 3, 256, 59, 41, 23,,,, +7, 6, 7, 1, 3, 3, 3, 3, 384, 59, 41, 23,,,, +7, 6, 7, 2, 1, 1, 1, 1, 128, 113, 77, 41,,,, +7, 6, 7, 2, 1, 1, 1, 1, 256, 113, 77, 41,,,, +7, 6, 7, 2, 1, 1, 1, 1, 384, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 1, 1, 128, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 1, 1, 256, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 1, 1, 384, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 1, 2, 128, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 1, 2, 256, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 1, 2, 384, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 2, 1, 128, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 2, 1, 256, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 2, 1, 384, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 2, 2, 128, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 2, 2, 256, 113, 77, 41,,,, +7, 6, 7, 2, 2, 2, 2, 2, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 1, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 1, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 1, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 2, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 2, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 2, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 3, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 3, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 1, 3, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 1, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 1, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 1, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 2, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 2, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 2, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 3, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 3, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 2, 3, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 1, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 1, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 1, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 2, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 2, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 2, 384, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 3, 128, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 3, 256, 113, 77, 41,,,, +7, 6, 7, 2, 3, 3, 3, 3, 384, 
113, 77, 41,,,, +7, 6, 7, 3, 1, 1, 1, 1, 128, 167, 113, 59,,,, +7, 6, 7, 3, 1, 1, 1, 1, 256, 167, 113, 59,,,, +7, 6, 7, 3, 1, 1, 1, 1, 384, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 1, 1, 128, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 1, 1, 256, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 1, 1, 384, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 1, 2, 128, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 1, 2, 256, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 1, 2, 384, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 2, 1, 128, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 2, 1, 256, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 2, 1, 384, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 2, 2, 128, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 2, 2, 256, 167, 113, 59,,,, +7, 6, 7, 3, 2, 2, 2, 2, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 1, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 1, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 1, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 2, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 2, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 2, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 3, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 3, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 1, 3, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 1, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 1, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 1, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 2, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 2, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 2, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 3, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 3, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 2, 3, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 1, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 1, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 1, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 2, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 2, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 2, 384, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 3, 128, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 3, 256, 167, 113, 59,,,, +7, 6, 7, 3, 3, 3, 3, 3, 384, 167, 113, 59,,,, +7, 6, 7, 
4, 1, 1, 1, 1, 128, 221, 149, 77,,,, +7, 6, 7, 4, 1, 1, 1, 1, 256, 221, 149, 77,,,, +7, 6, 7, 4, 1, 1, 1, 1, 384, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 1, 1, 128, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 1, 1, 256, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 1, 1, 384, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 1, 2, 128, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 1, 2, 256, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 1, 2, 384, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 2, 1, 128, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 2, 1, 256, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 2, 1, 384, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 2, 2, 128, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 2, 2, 256, 221, 149, 77,,,, +7, 6, 7, 4, 2, 2, 2, 2, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 1, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 1, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 1, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 2, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 2, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 2, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 3, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 3, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 1, 3, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 1, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 1, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 1, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 2, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 2, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 2, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 3, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 3, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 2, 3, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 1, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 1, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 1, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 2, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 2, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 2, 384, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 3, 128, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 3, 256, 221, 149, 77,,,, +7, 6, 7, 4, 3, 3, 3, 3, 384, 221, 149, 77,,,, +7, 6, 7, 5, 1, 1, 1, 1, 128, 240, 
185, 95,,,, +7, 6, 7, 5, 1, 1, 1, 1, 256, 240, 185, 95,,,, +7, 6, 7, 5, 1, 1, 1, 1, 384, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 1, 1, 128, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 1, 1, 256, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 1, 1, 384, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 1, 2, 128, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 1, 2, 256, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 1, 2, 384, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 2, 1, 128, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 2, 1, 256, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 2, 1, 384, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 2, 2, 128, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 2, 2, 256, 240, 185, 95,,,, +7, 6, 7, 5, 2, 2, 2, 2, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 1, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 1, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 1, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 2, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 2, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 2, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 3, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 3, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 1, 3, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 1, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 1, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 1, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 2, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 2, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 2, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 3, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 3, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 2, 3, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 1, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 1, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 1, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 2, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 2, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 2, 384, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 3, 128, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 3, 256, 240, 185, 95,,,, +7, 6, 7, 5, 3, 3, 3, 3, 384, 240, 185, 95,,,, +7, 6, 7, 6, 1, 1, 1, 1, 128, 240, 221, 113,,,, +7, 6, 7, 6, 
1, 1, 1, 1, 256, 240, 221, 113,,,, +7, 6, 7, 6, 1, 1, 1, 1, 384, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 1, 1, 128, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 1, 1, 256, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 1, 1, 384, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 1, 2, 128, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 1, 2, 256, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 1, 2, 384, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 2, 1, 128, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 2, 1, 256, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 2, 1, 384, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 2, 2, 128, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 2, 2, 256, 240, 221, 113,,,, +7, 6, 7, 6, 2, 2, 2, 2, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 1, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 1, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 1, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 2, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 2, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 2, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 3, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 3, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 1, 3, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 1, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 1, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 1, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 2, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 2, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 2, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 3, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 3, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 2, 3, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 1, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 1, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 1, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 2, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 2, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 2, 384, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 3, 128, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 3, 256, 240, 221, 113,,,, +7, 6, 7, 6, 3, 3, 3, 3, 384, 240, 221, 113,,,, +7, 7, 1, 1, 1, 1, 1, 1, 128, 60, 42,,,,, 
+7, 7, 1, 1, 1, 1, 1, 1, 256, 60, 42,,,,, +7, 7, 1, 1, 1, 1, 1, 1, 384, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 1, 1, 128, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 1, 1, 256, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 1, 1, 384, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 1, 2, 128, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 1, 2, 256, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 1, 2, 384, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 2, 1, 128, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 2, 1, 256, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 2, 1, 384, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 2, 2, 128, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 2, 2, 256, 60, 42,,,,, +7, 7, 1, 1, 2, 2, 2, 2, 384, 60, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 1, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 1, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 1, 384, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 2, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 2, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 2, 384, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 3, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 3, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 1, 3, 384, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 1, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 1, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 1, 384, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 2, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 2, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 2, 384, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 3, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 3, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 2, 3, 384, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 1, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 1, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 1, 384, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 2, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 2, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 2, 384, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 3, 128, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 3, 256, 59, 42,,,,, +7, 7, 1, 1, 3, 3, 3, 3, 384, 59, 42,,,,, +7, 7, 1, 2, 1, 1, 1, 1, 128, 114, 78,,,,, +7, 7, 1, 2, 1, 1, 1, 1, 256, 114, 78,,,,, +7, 7, 1, 2, 1, 1, 1, 1, 384, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 1, 1, 128, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 1, 1, 256, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 1, 1, 384, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 
1, 2, 128, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 1, 2, 256, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 1, 2, 384, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 2, 1, 128, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 2, 1, 256, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 2, 1, 384, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 2, 2, 128, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 2, 2, 256, 114, 78,,,,, +7, 7, 1, 2, 2, 2, 2, 2, 384, 114, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 1, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 1, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 1, 384, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 2, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 2, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 2, 384, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 3, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 3, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 1, 3, 384, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 1, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 1, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 1, 384, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 2, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 2, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 2, 384, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 3, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 3, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 2, 3, 384, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 1, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 1, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 1, 384, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 2, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 2, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 2, 384, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 3, 128, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 3, 256, 112, 78,,,,, +7, 7, 1, 2, 3, 3, 3, 3, 384, 112, 78,,,,, +7, 7, 1, 3, 1, 1, 1, 1, 128, 168, 114,,,,, +7, 7, 1, 3, 1, 1, 1, 1, 256, 168, 114,,,,, +7, 7, 1, 3, 1, 1, 1, 1, 384, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 1, 1, 128, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 1, 1, 256, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 1, 1, 384, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 1, 2, 128, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 1, 2, 256, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 1, 2, 384, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 2, 1, 128, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 2, 1, 256, 
168, 114,,,,, +7, 7, 1, 3, 2, 2, 2, 1, 384, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 2, 2, 128, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 2, 2, 256, 168, 114,,,,, +7, 7, 1, 3, 2, 2, 2, 2, 384, 168, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 1, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 1, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 1, 384, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 2, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 2, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 2, 384, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 3, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 3, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 1, 3, 384, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 1, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 1, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 1, 384, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 2, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 2, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 2, 384, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 3, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 3, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 2, 3, 384, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 1, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 1, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 1, 384, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 2, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 2, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 2, 384, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 3, 128, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 3, 256, 165, 114,,,,, +7, 7, 1, 3, 3, 3, 3, 3, 384, 165, 114,,,,, +7, 7, 1, 4, 1, 1, 1, 1, 128, 222, 150,,,,, +7, 7, 1, 4, 1, 1, 1, 1, 256, 222, 150,,,,, +7, 7, 1, 4, 1, 1, 1, 1, 384, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 1, 1, 128, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 1, 1, 256, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 1, 1, 384, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 1, 2, 128, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 1, 2, 256, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 1, 2, 384, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 2, 1, 128, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 2, 1, 256, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 2, 1, 384, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 2, 2, 128, 222, 150,,,,, +7, 7, 1, 4, 2, 2, 2, 2, 256, 222, 150,,,,, +7, 
7, 1, 4, 2, 2, 2, 2, 384, 222, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 1, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 1, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 1, 384, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 2, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 2, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 2, 384, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 3, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 3, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 1, 3, 384, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 1, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 1, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 1, 384, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 2, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 2, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 2, 384, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 3, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 3, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 2, 3, 384, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 1, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 1, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 1, 384, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 2, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 2, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 2, 384, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 3, 128, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 3, 256, 218, 150,,,,, +7, 7, 1, 4, 3, 3, 3, 3, 384, 218, 150,,,,, +7, 7, 1, 5, 1, 1, 1, 1, 128, 240, 186,,,,, +7, 7, 1, 5, 1, 1, 1, 1, 256, 240, 186,,,,, +7, 7, 1, 5, 1, 1, 1, 1, 384, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 1, 1, 128, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 1, 1, 256, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 1, 1, 384, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 1, 2, 128, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 1, 2, 256, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 1, 2, 384, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 2, 1, 128, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 2, 1, 256, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 2, 1, 384, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 2, 2, 128, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 2, 2, 256, 240, 186,,,,, +7, 7, 1, 5, 2, 2, 2, 2, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 1, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 1, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 
1, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 2, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 2, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 2, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 3, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 3, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 1, 3, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 1, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 1, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 1, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 2, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 2, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 2, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 3, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 3, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 2, 3, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 1, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 1, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 1, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 2, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 2, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 2, 384, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 3, 128, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 3, 256, 240, 186,,,,, +7, 7, 1, 5, 3, 3, 3, 3, 384, 240, 186,,,,, +7, 7, 1, 6, 1, 1, 1, 1, 128, 240, 222,,,,, +7, 7, 1, 6, 1, 1, 1, 1, 256, 240, 222,,,,, +7, 7, 1, 6, 1, 1, 1, 1, 384, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 1, 1, 128, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 1, 1, 256, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 1, 1, 384, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 1, 2, 128, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 1, 2, 256, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 1, 2, 384, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 2, 1, 128, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 2, 1, 256, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 2, 1, 384, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 2, 2, 128, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 2, 2, 256, 240, 222,,,,, +7, 7, 1, 6, 2, 2, 2, 2, 384, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 1, 1, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 1, 1, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 1, 1, 384, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 1, 2, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 1, 2, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 1, 2, 384, 240, 
222,,,,, +7, 7, 1, 6, 3, 3, 1, 3, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 1, 3, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 1, 3, 384, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 1, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 1, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 1, 384, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 2, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 2, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 2, 384, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 3, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 3, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 2, 3, 384, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 1, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 1, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 1, 384, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 2, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 2, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 2, 384, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 3, 128, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 3, 256, 240, 222,,,,, +7, 7, 1, 6, 3, 3, 3, 3, 384, 240, 222,,,,, +7, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240,,,,, +7, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240,,,,, +7, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 1, 1, 128, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 1, 1, 256, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 1, 1, 384, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 1, 2, 128, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 1, 2, 256, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 1, 2, 384, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 2, 1, 128, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 2, 1, 256, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 2, 1, 384, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 2, 2, 128, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 2, 2, 256, 240, 240,,,,, +7, 7, 1, 7, 2, 2, 2, 2, 384, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 1, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 1, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 1, 384, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 2, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 2, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 2, 384, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 3, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 3, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 1, 3, 384, 240, 240,,,,, +7, 7, 1, 
7, 3, 3, 2, 1, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 2, 1, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 2, 1, 384, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 2, 2, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 2, 2, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 2, 2, 384, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 2, 3, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 2, 3, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 2, 3, 384, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 1, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 1, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 1, 384, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 2, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 2, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 2, 384, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 3, 128, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 3, 256, 240, 240,,,,, +7, 7, 1, 7, 3, 3, 3, 3, 384, 240, 240,,,,, +7, 7, 2, 1, 1, 1, 1, 1, 128, 60, 42,,,,, +7, 7, 2, 1, 1, 1, 1, 1, 256, 60, 42,,,,, +7, 7, 2, 1, 1, 1, 1, 1, 384, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 1, 1, 128, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 1, 1, 256, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 1, 1, 384, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 1, 2, 128, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 1, 2, 256, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 1, 2, 384, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 2, 1, 128, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 2, 1, 256, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 2, 1, 384, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 2, 2, 128, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 2, 2, 256, 60, 42,,,,, +7, 7, 2, 1, 2, 2, 2, 2, 384, 60, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 1, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 1, 256, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 1, 384, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 2, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 2, 256, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 2, 384, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 3, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 3, 256, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 1, 3, 384, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 2, 1, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 2, 1, 256, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 2, 1, 384, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 2, 2, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 2, 2, 256, 59, 42,,,,, 
+7, 7, 2, 1, 3, 3, 2, 2, 384, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 2, 3, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 2, 3, 256, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 2, 3, 384, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 1, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 1, 256, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 1, 384, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 2, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 2, 256, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 2, 384, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 3, 128, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 3, 256, 59, 42,,,,, +7, 7, 2, 1, 3, 3, 3, 3, 384, 59, 42,,,,, +7, 7, 2, 2, 1, 1, 1, 1, 128, 114, 78,,,,, +7, 7, 2, 2, 1, 1, 1, 1, 256, 114, 78,,,,, +7, 7, 2, 2, 1, 1, 1, 1, 384, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 1, 1, 128, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 1, 1, 256, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 1, 1, 384, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 1, 2, 128, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 1, 2, 256, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 1, 2, 384, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 2, 1, 128, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 2, 1, 256, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 2, 1, 384, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 2, 2, 128, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 2, 2, 256, 114, 78,,,,, +7, 7, 2, 2, 2, 2, 2, 2, 384, 114, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 1, 128, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 1, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 1, 384, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 2, 128, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 2, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 2, 384, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 3, 128, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 3, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 1, 3, 384, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 1, 128, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 1, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 1, 384, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 2, 128, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 2, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 2, 384, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 3, 128, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 3, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 2, 3, 384, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 3, 1, 128, 112, 
78,,,,, +7, 7, 2, 2, 3, 3, 3, 1, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 3, 1, 384, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 3, 2, 128, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 3, 2, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 3, 2, 384, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 3, 3, 128, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 3, 3, 256, 112, 78,,,,, +7, 7, 2, 2, 3, 3, 3, 3, 384, 112, 78,,,,, +7, 7, 2, 3, 1, 1, 1, 1, 128, 168, 114,,,,, +7, 7, 2, 3, 1, 1, 1, 1, 256, 168, 114,,,,, +7, 7, 2, 3, 1, 1, 1, 1, 384, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 1, 1, 128, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 1, 1, 256, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 1, 1, 384, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 1, 2, 128, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 1, 2, 256, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 1, 2, 384, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 2, 1, 128, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 2, 1, 256, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 2, 1, 384, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 2, 2, 128, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 2, 2, 256, 168, 114,,,,, +7, 7, 2, 3, 2, 2, 2, 2, 384, 168, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 1, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 1, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 1, 384, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 2, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 2, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 2, 384, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 3, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 3, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 1, 3, 384, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 1, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 1, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 1, 384, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 2, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 2, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 2, 384, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 3, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 3, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 2, 3, 384, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 3, 1, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 3, 1, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 3, 1, 384, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 3, 2, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 
3, 2, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 3, 2, 384, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 3, 3, 128, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 3, 3, 256, 165, 114,,,,, +7, 7, 2, 3, 3, 3, 3, 3, 384, 165, 114,,,,, +7, 7, 2, 4, 1, 1, 1, 1, 128, 222, 150,,,,, +7, 7, 2, 4, 1, 1, 1, 1, 256, 222, 150,,,,, +7, 7, 2, 4, 1, 1, 1, 1, 384, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 1, 1, 128, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 1, 1, 256, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 1, 1, 384, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 1, 2, 128, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 1, 2, 256, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 1, 2, 384, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 2, 1, 128, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 2, 1, 256, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 2, 1, 384, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 2, 2, 128, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 2, 2, 256, 222, 150,,,,, +7, 7, 2, 4, 2, 2, 2, 2, 384, 222, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 1, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 1, 256, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 1, 384, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 2, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 2, 256, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 2, 384, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 3, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 3, 256, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 1, 3, 384, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 1, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 1, 256, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 1, 384, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 2, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 2, 256, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 2, 384, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 3, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 3, 256, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 2, 3, 384, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 3, 1, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 3, 1, 256, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 3, 1, 384, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 3, 2, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 3, 2, 256, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 3, 2, 384, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 3, 3, 128, 218, 150,,,,, +7, 7, 2, 4, 3, 3, 3, 3, 256, 218, 
150,,,,, +7, 7, 2, 4, 3, 3, 3, 3, 384, 218, 150,,,,, +7, 7, 2, 5, 1, 1, 1, 1, 128, 240, 186,,,,, +7, 7, 2, 5, 1, 1, 1, 1, 256, 240, 186,,,,, +7, 7, 2, 5, 1, 1, 1, 1, 384, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 1, 1, 128, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 1, 1, 256, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 1, 1, 384, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 1, 2, 128, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 1, 2, 256, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 1, 2, 384, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 2, 1, 128, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 2, 1, 256, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 2, 1, 384, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 2, 2, 128, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 2, 2, 256, 240, 186,,,,, +7, 7, 2, 5, 2, 2, 2, 2, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 1, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 1, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 1, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 2, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 2, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 2, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 3, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 3, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 1, 3, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 1, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 1, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 1, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 2, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 2, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 2, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 3, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 3, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 2, 3, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 1, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 1, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 1, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 2, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 2, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 2, 384, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 3, 128, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 3, 256, 240, 186,,,,, +7, 7, 2, 5, 3, 3, 3, 3, 384, 240, 186,,,,, +7, 7, 2, 6, 1, 1, 1, 1, 128, 240, 222,,,,, +7, 7, 2, 6, 1, 1, 1, 1, 256, 240, 222,,,,, +7, 7, 2, 
6, 1, 1, 1, 1, 384, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 1, 1, 128, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 1, 1, 256, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 1, 1, 384, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 1, 2, 128, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 1, 2, 256, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 1, 2, 384, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 2, 1, 128, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 2, 1, 256, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 2, 1, 384, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 2, 2, 128, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 2, 2, 256, 240, 222,,,,, +7, 7, 2, 6, 2, 2, 2, 2, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 1, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 1, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 1, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 2, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 2, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 2, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 3, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 3, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 1, 3, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 1, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 1, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 1, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 2, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 2, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 2, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 3, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 3, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 2, 3, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 1, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 1, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 1, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 2, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 2, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 2, 384, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 3, 128, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 3, 256, 240, 222,,,,, +7, 7, 2, 6, 3, 3, 3, 3, 384, 240, 222,,,,, +7, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240,,,,, +7, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240,,,,, +7, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 1, 1, 128, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 1, 1, 256, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 1, 1, 384, 
240, 240,,,,, +7, 7, 2, 7, 2, 2, 1, 2, 128, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 1, 2, 256, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 1, 2, 384, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 2, 1, 128, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 2, 1, 256, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 2, 1, 384, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 2, 2, 128, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 2, 2, 256, 240, 240,,,,, +7, 7, 2, 7, 2, 2, 2, 2, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 1, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 1, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 1, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 2, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 2, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 2, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 3, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 3, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 1, 3, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 1, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 1, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 1, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 2, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 2, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 2, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 3, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 3, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 2, 3, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 1, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 1, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 1, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 2, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 2, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 2, 384, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 3, 128, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 3, 256, 240, 240,,,,, +7, 7, 2, 7, 3, 3, 3, 3, 384, 240, 240,,,,, +7, 7, 3, 1, 1, 1, 1, 1, 128, 60, 42,,,,, +7, 7, 3, 1, 1, 1, 1, 1, 256, 60, 42,,,,, +7, 7, 3, 1, 1, 1, 1, 1, 384, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 1, 1, 128, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 1, 1, 256, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 1, 1, 384, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 1, 2, 128, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 1, 2, 256, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 1, 2, 384, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 2, 
1, 128, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 2, 1, 256, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 2, 1, 384, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 2, 2, 128, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 2, 2, 256, 60, 42,,,,, +7, 7, 3, 1, 2, 2, 2, 2, 384, 60, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 1, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 1, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 1, 384, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 2, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 2, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 2, 384, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 3, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 3, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 1, 3, 384, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 1, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 1, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 1, 384, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 2, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 2, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 2, 384, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 3, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 3, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 2, 3, 384, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 1, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 1, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 1, 384, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 2, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 2, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 2, 384, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 3, 128, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 3, 256, 59, 42,,,,, +7, 7, 3, 1, 3, 3, 3, 3, 384, 59, 42,,,,, +7, 7, 3, 2, 1, 1, 1, 1, 128, 114, 78,,,,, +7, 7, 3, 2, 1, 1, 1, 1, 256, 114, 78,,,,, +7, 7, 3, 2, 1, 1, 1, 1, 384, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 1, 1, 128, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 1, 1, 256, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 1, 1, 384, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 1, 2, 128, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 1, 2, 256, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 1, 2, 384, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 2, 1, 128, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 2, 1, 256, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 2, 1, 384, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 2, 2, 128, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 2, 2, 256, 114, 78,,,,, +7, 7, 3, 2, 2, 2, 2, 2, 384, 
114, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 1, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 1, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 1, 384, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 2, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 2, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 2, 384, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 3, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 3, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 1, 3, 384, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 1, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 1, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 1, 384, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 2, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 2, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 2, 384, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 3, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 3, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 2, 3, 384, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 1, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 1, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 1, 384, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 2, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 2, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 2, 384, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 3, 128, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 3, 256, 112, 78,,,,, +7, 7, 3, 2, 3, 3, 3, 3, 384, 112, 78,,,,, +7, 7, 3, 3, 1, 1, 1, 1, 128, 168, 114,,,,, +7, 7, 3, 3, 1, 1, 1, 1, 256, 168, 114,,,,, +7, 7, 3, 3, 1, 1, 1, 1, 384, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 1, 1, 128, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 1, 1, 256, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 1, 1, 384, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 1, 2, 128, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 1, 2, 256, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 1, 2, 384, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 2, 1, 128, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 2, 1, 256, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 2, 1, 384, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 2, 2, 128, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 2, 2, 256, 168, 114,,,,, +7, 7, 3, 3, 2, 2, 2, 2, 384, 168, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 1, 128, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 1, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 1, 384, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 2, 128, 
165, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 2, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 2, 384, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 3, 128, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 3, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 1, 3, 384, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 1, 128, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 1, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 1, 384, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 2, 128, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 2, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 2, 384, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 3, 128, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 3, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 2, 3, 384, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 1, 128, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 1, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 1, 384, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 2, 128, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 2, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 2, 384, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 3, 128, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 3, 256, 165, 114,,,,, +7, 7, 3, 3, 3, 3, 3, 3, 384, 165, 114,,,,, +7, 7, 3, 4, 1, 1, 1, 1, 128, 222, 150,,,,, +7, 7, 3, 4, 1, 1, 1, 1, 256, 222, 150,,,,, +7, 7, 3, 4, 1, 1, 1, 1, 384, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 1, 1, 128, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 1, 1, 256, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 1, 1, 384, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 1, 2, 128, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 1, 2, 256, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 1, 2, 384, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 2, 1, 128, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 2, 1, 256, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 2, 1, 384, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 2, 2, 128, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 2, 2, 256, 222, 150,,,,, +7, 7, 3, 4, 2, 2, 2, 2, 384, 222, 150,,,,, +7, 7, 3, 4, 3, 3, 1, 1, 128, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 1, 1, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 1, 1, 384, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 1, 2, 128, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 1, 2, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 1, 2, 384, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 1, 3, 128, 218, 150,,,,, +7, 
7, 3, 4, 3, 3, 1, 3, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 1, 3, 384, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 1, 128, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 1, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 1, 384, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 2, 128, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 2, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 2, 384, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 3, 128, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 3, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 2, 3, 384, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 1, 128, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 1, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 1, 384, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 2, 128, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 2, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 2, 384, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 3, 128, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 3, 256, 218, 150,,,,, +7, 7, 3, 4, 3, 3, 3, 3, 384, 218, 150,,,,, +7, 7, 3, 5, 1, 1, 1, 1, 128, 240, 186,,,,, +7, 7, 3, 5, 1, 1, 1, 1, 256, 240, 186,,,,, +7, 7, 3, 5, 1, 1, 1, 1, 384, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 1, 1, 128, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 1, 1, 256, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 1, 1, 384, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 1, 2, 128, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 1, 2, 256, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 1, 2, 384, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 2, 1, 128, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 2, 1, 256, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 2, 1, 384, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 2, 2, 128, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 2, 2, 256, 240, 186,,,,, +7, 7, 3, 5, 2, 2, 2, 2, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 1, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 1, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 1, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 2, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 2, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 2, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 3, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 3, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 1, 3, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 1, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 
1, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 1, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 2, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 2, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 2, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 3, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 3, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 2, 3, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 1, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 1, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 1, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 2, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 2, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 2, 384, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 3, 128, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 3, 256, 240, 186,,,,, +7, 7, 3, 5, 3, 3, 3, 3, 384, 240, 186,,,,, +7, 7, 3, 6, 1, 1, 1, 1, 128, 240, 222,,,,, +7, 7, 3, 6, 1, 1, 1, 1, 256, 240, 222,,,,, +7, 7, 3, 6, 1, 1, 1, 1, 384, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 1, 1, 128, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 1, 1, 256, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 1, 1, 384, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 1, 2, 128, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 1, 2, 256, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 1, 2, 384, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 2, 1, 128, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 2, 1, 256, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 2, 1, 384, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 2, 2, 128, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 2, 2, 256, 240, 222,,,,, +7, 7, 3, 6, 2, 2, 2, 2, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 1, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 1, 256, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 1, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 2, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 2, 256, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 2, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 3, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 3, 256, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 1, 3, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 2, 1, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 2, 1, 256, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 2, 1, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 2, 2, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 2, 2, 256, 240, 
222,,,,, +7, 7, 3, 6, 3, 3, 2, 2, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 2, 3, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 2, 3, 256, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 2, 3, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 1, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 1, 256, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 1, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 2, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 2, 256, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 2, 384, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 3, 128, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 3, 256, 240, 222,,,,, +7, 7, 3, 6, 3, 3, 3, 3, 384, 240, 222,,,,, +7, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240,,,,, +7, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240,,,,, +7, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 1, 1, 128, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 1, 1, 256, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 1, 1, 384, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 1, 2, 128, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 1, 2, 256, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 1, 2, 384, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 2, 1, 128, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 2, 1, 256, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 2, 1, 384, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 2, 2, 128, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 2, 2, 256, 240, 240,,,,, +7, 7, 3, 7, 2, 2, 2, 2, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 1, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 1, 256, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 1, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 2, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 2, 256, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 2, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 3, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 3, 256, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 1, 3, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 2, 1, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 2, 1, 256, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 2, 1, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 2, 2, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 2, 2, 256, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 2, 2, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 2, 3, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 2, 3, 256, 240, 240,,,,, +7, 7, 3, 
7, 3, 3, 2, 3, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 1, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 1, 256, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 1, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 2, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 2, 256, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 2, 384, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 3, 128, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 3, 256, 240, 240,,,,, +7, 7, 3, 7, 3, 3, 3, 3, 384, 240, 240,,,,, +7, 7, 4, 1, 1, 1, 1, 1, 128, 60, 42,,,,, +7, 7, 4, 1, 1, 1, 1, 1, 256, 60, 42,,,,, +7, 7, 4, 1, 1, 1, 1, 1, 384, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 1, 1, 128, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 1, 1, 256, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 1, 1, 384, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 1, 2, 128, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 1, 2, 256, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 1, 2, 384, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 2, 1, 128, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 2, 1, 256, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 2, 1, 384, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 2, 2, 128, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 2, 2, 256, 60, 42,,,,, +7, 7, 4, 1, 2, 2, 2, 2, 384, 60, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 1, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 1, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 1, 384, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 2, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 2, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 2, 384, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 3, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 3, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 1, 3, 384, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 1, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 1, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 1, 384, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 2, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 2, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 2, 384, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 3, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 3, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 2, 3, 384, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 3, 1, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 3, 1, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 3, 1, 384, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 3, 2, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 
3, 3, 2, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 3, 2, 384, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 3, 3, 128, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 3, 3, 256, 59, 42,,,,, +7, 7, 4, 1, 3, 3, 3, 3, 384, 59, 42,,,,, +7, 7, 4, 2, 1, 1, 1, 1, 128, 114, 78,,,,, +7, 7, 4, 2, 1, 1, 1, 1, 256, 114, 78,,,,, +7, 7, 4, 2, 1, 1, 1, 1, 384, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 1, 1, 128, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 1, 1, 256, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 1, 1, 384, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 1, 2, 128, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 1, 2, 256, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 1, 2, 384, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 2, 1, 128, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 2, 1, 256, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 2, 1, 384, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 2, 2, 128, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 2, 2, 256, 114, 78,,,,, +7, 7, 4, 2, 2, 2, 2, 2, 384, 114, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 1, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 1, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 1, 384, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 2, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 2, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 2, 384, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 3, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 3, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 1, 3, 384, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 1, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 1, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 1, 384, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 2, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 2, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 2, 384, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 3, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 3, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 2, 3, 384, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 1, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 1, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 1, 384, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 2, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 2, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 2, 384, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 3, 128, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 3, 256, 112, 78,,,,, +7, 7, 4, 2, 3, 3, 3, 3, 384, 112, 78,,,,, 
+7, 7, 4, 3, 1, 1, 1, 1, 128, 168, 114,,,,, +7, 7, 4, 3, 1, 1, 1, 1, 256, 168, 114,,,,, +7, 7, 4, 3, 1, 1, 1, 1, 384, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 1, 1, 128, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 1, 1, 256, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 1, 1, 384, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 1, 2, 128, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 1, 2, 256, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 1, 2, 384, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 2, 1, 128, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 2, 1, 256, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 2, 1, 384, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 2, 2, 128, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 2, 2, 256, 168, 114,,,,, +7, 7, 4, 3, 2, 2, 2, 2, 384, 168, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 1, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 1, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 1, 384, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 2, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 2, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 2, 384, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 3, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 3, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 1, 3, 384, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 1, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 1, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 1, 384, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 2, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 2, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 2, 384, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 3, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 3, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 2, 3, 384, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 1, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 1, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 1, 384, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 2, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 2, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 2, 384, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 3, 128, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 3, 256, 165, 114,,,,, +7, 7, 4, 3, 3, 3, 3, 3, 384, 165, 114,,,,, +7, 7, 4, 4, 1, 1, 1, 1, 128, 222, 150,,,,, +7, 7, 4, 4, 1, 1, 1, 1, 256, 222, 150,,,,, +7, 7, 4, 4, 1, 1, 1, 1, 384, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 
1, 1, 128, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 1, 1, 256, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 1, 1, 384, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 1, 2, 128, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 1, 2, 256, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 1, 2, 384, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 2, 1, 128, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 2, 1, 256, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 2, 1, 384, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 2, 2, 128, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 2, 2, 256, 222, 150,,,,, +7, 7, 4, 4, 2, 2, 2, 2, 384, 222, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 1, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 1, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 1, 384, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 2, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 2, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 2, 384, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 3, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 3, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 1, 3, 384, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 1, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 1, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 1, 384, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 2, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 2, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 2, 384, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 3, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 3, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 2, 3, 384, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 1, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 1, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 1, 384, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 2, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 2, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 2, 384, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 3, 128, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 3, 256, 218, 150,,,,, +7, 7, 4, 4, 3, 3, 3, 3, 384, 218, 150,,,,, +7, 7, 4, 5, 1, 1, 1, 1, 128, 240, 186,,,,, +7, 7, 4, 5, 1, 1, 1, 1, 256, 240, 186,,,,, +7, 7, 4, 5, 1, 1, 1, 1, 384, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 1, 1, 128, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 1, 1, 256, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 1, 1, 384, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 1, 2, 128, 240, 
186,,,,, +7, 7, 4, 5, 2, 2, 1, 2, 256, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 1, 2, 384, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 2, 1, 128, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 2, 1, 256, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 2, 1, 384, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 2, 2, 128, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 2, 2, 256, 240, 186,,,,, +7, 7, 4, 5, 2, 2, 2, 2, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 1, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 1, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 1, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 2, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 2, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 2, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 3, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 3, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 1, 3, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 1, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 1, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 1, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 2, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 2, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 2, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 3, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 3, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 2, 3, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 1, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 1, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 1, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 2, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 2, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 2, 384, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 3, 128, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 3, 256, 240, 186,,,,, +7, 7, 4, 5, 3, 3, 3, 3, 384, 240, 186,,,,, +7, 7, 4, 6, 1, 1, 1, 1, 128, 240, 222,,,,, +7, 7, 4, 6, 1, 1, 1, 1, 256, 240, 222,,,,, +7, 7, 4, 6, 1, 1, 1, 1, 384, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 1, 1, 128, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 1, 1, 256, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 1, 1, 384, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 1, 2, 128, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 1, 2, 256, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 1, 2, 384, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 2, 1, 128, 240, 222,,,,, +7, 7, 4, 
6, 2, 2, 2, 1, 256, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 2, 1, 384, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 2, 2, 128, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 2, 2, 256, 240, 222,,,,, +7, 7, 4, 6, 2, 2, 2, 2, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 1, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 1, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 1, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 2, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 2, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 2, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 3, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 3, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 1, 3, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 1, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 1, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 1, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 2, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 2, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 2, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 3, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 3, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 2, 3, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 1, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 1, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 1, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 2, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 2, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 2, 384, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 3, 128, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 3, 256, 240, 222,,,,, +7, 7, 4, 6, 3, 3, 3, 3, 384, 240, 222,,,,, +7, 7, 4, 7, 1, 1, 1, 1, 128, 240, 240,,,,, +7, 7, 4, 7, 1, 1, 1, 1, 256, 240, 240,,,,, +7, 7, 4, 7, 1, 1, 1, 1, 384, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 1, 1, 128, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 1, 1, 256, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 1, 1, 384, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 1, 2, 128, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 1, 2, 256, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 1, 2, 384, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 2, 1, 128, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 2, 1, 256, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 2, 1, 384, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 2, 2, 128, 240, 240,,,,, +7, 7, 4, 7, 2, 2, 2, 2, 256, 
240, 240,,,,, +7, 7, 4, 7, 2, 2, 2, 2, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 1, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 1, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 1, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 2, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 2, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 2, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 3, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 3, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 1, 3, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 1, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 1, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 1, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 2, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 2, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 2, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 3, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 3, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 2, 3, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 1, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 1, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 1, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 2, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 2, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 2, 384, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 3, 128, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 3, 256, 240, 240,,,,, +7, 7, 4, 7, 3, 3, 3, 3, 384, 240, 240,,,,, +7, 7, 5, 1, 1, 1, 1, 1, 128, 60, 42,,,,, +7, 7, 5, 1, 1, 1, 1, 1, 256, 60, 42,,,,, +7, 7, 5, 1, 1, 1, 1, 1, 384, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 1, 1, 128, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 1, 1, 256, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 1, 1, 384, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 1, 2, 128, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 1, 2, 256, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 1, 2, 384, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 2, 1, 128, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 2, 1, 256, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 2, 1, 384, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 2, 2, 128, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 2, 2, 256, 60, 42,,,,, +7, 7, 5, 1, 2, 2, 2, 2, 384, 60, 42,,,,, +7, 7, 5, 1, 3, 3, 1, 1, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 1, 1, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 1, 1, 384, 59, 
42,,,,, +7, 7, 5, 1, 3, 3, 1, 2, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 1, 2, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 1, 2, 384, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 1, 3, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 1, 3, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 1, 3, 384, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 1, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 1, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 1, 384, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 2, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 2, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 2, 384, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 3, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 3, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 2, 3, 384, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 1, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 1, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 1, 384, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 2, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 2, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 2, 384, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 3, 128, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 3, 256, 59, 42,,,,, +7, 7, 5, 1, 3, 3, 3, 3, 384, 59, 42,,,,, +7, 7, 5, 2, 1, 1, 1, 1, 128, 114, 78,,,,, +7, 7, 5, 2, 1, 1, 1, 1, 256, 114, 78,,,,, +7, 7, 5, 2, 1, 1, 1, 1, 384, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 1, 1, 128, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 1, 1, 256, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 1, 1, 384, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 1, 2, 128, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 1, 2, 256, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 1, 2, 384, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 2, 1, 128, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 2, 1, 256, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 2, 1, 384, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 2, 2, 128, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 2, 2, 256, 114, 78,,,,, +7, 7, 5, 2, 2, 2, 2, 2, 384, 114, 78,,,,, +7, 7, 5, 2, 3, 3, 1, 1, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 1, 1, 256, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 1, 1, 384, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 1, 2, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 1, 2, 256, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 1, 2, 384, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 1, 3, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 1, 3, 256, 112, 
78,,,,, +7, 7, 5, 2, 3, 3, 1, 3, 384, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 1, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 1, 256, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 1, 384, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 2, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 2, 256, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 2, 384, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 3, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 3, 256, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 2, 3, 384, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 1, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 1, 256, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 1, 384, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 2, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 2, 256, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 2, 384, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 3, 128, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 3, 256, 112, 78,,,,, +7, 7, 5, 2, 3, 3, 3, 3, 384, 112, 78,,,,, +7, 7, 5, 3, 1, 1, 1, 1, 128, 168, 114,,,,, +7, 7, 5, 3, 1, 1, 1, 1, 256, 168, 114,,,,, +7, 7, 5, 3, 1, 1, 1, 1, 384, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 1, 1, 128, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 1, 1, 256, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 1, 1, 384, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 1, 2, 128, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 1, 2, 256, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 1, 2, 384, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 2, 1, 128, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 2, 1, 256, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 2, 1, 384, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 2, 2, 128, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 2, 2, 256, 168, 114,,,,, +7, 7, 5, 3, 2, 2, 2, 2, 384, 168, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 1, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 1, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 1, 384, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 2, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 2, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 2, 384, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 3, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 3, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 1, 3, 384, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 1, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 1, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 1, 384, 
165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 2, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 2, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 2, 384, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 3, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 3, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 2, 3, 384, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 1, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 1, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 1, 384, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 2, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 2, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 2, 384, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 3, 128, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 3, 256, 165, 114,,,,, +7, 7, 5, 3, 3, 3, 3, 3, 384, 165, 114,,,,, +7, 7, 5, 4, 1, 1, 1, 1, 128, 222, 150,,,,, +7, 7, 5, 4, 1, 1, 1, 1, 256, 222, 150,,,,, +7, 7, 5, 4, 1, 1, 1, 1, 384, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 1, 1, 128, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 1, 1, 256, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 1, 1, 384, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 1, 2, 128, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 1, 2, 256, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 1, 2, 384, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 2, 1, 128, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 2, 1, 256, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 2, 1, 384, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 2, 2, 128, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 2, 2, 256, 222, 150,,,,, +7, 7, 5, 4, 2, 2, 2, 2, 384, 222, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 1, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 1, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 1, 384, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 2, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 2, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 2, 384, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 3, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 3, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 1, 3, 384, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 2, 1, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 2, 1, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 2, 1, 384, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 2, 2, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 2, 2, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 2, 2, 384, 218, 150,,,,, +7, 
7, 5, 4, 3, 3, 2, 3, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 2, 3, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 2, 3, 384, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 1, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 1, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 1, 384, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 2, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 2, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 2, 384, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 3, 128, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 3, 256, 218, 150,,,,, +7, 7, 5, 4, 3, 3, 3, 3, 384, 218, 150,,,,, +7, 7, 5, 5, 1, 1, 1, 1, 128, 240, 186,,,,, +7, 7, 5, 5, 1, 1, 1, 1, 256, 240, 186,,,,, +7, 7, 5, 5, 1, 1, 1, 1, 384, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 1, 1, 128, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 1, 1, 256, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 1, 1, 384, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 1, 2, 128, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 1, 2, 256, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 1, 2, 384, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 2, 1, 128, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 2, 1, 256, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 2, 1, 384, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 2, 2, 128, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 2, 2, 256, 240, 186,,,,, +7, 7, 5, 5, 2, 2, 2, 2, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 1, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 1, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 1, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 2, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 2, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 2, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 3, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 3, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 1, 3, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 1, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 1, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 1, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 2, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 2, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 2, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 3, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 3, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 2, 3, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 
1, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 1, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 1, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 2, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 2, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 2, 384, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 3, 128, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 3, 256, 240, 186,,,,, +7, 7, 5, 5, 3, 3, 3, 3, 384, 240, 186,,,,, +7, 7, 5, 6, 1, 1, 1, 1, 128, 240, 222,,,,, +7, 7, 5, 6, 1, 1, 1, 1, 256, 240, 222,,,,, +7, 7, 5, 6, 1, 1, 1, 1, 384, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 1, 1, 128, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 1, 1, 256, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 1, 1, 384, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 1, 2, 128, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 1, 2, 256, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 1, 2, 384, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 2, 1, 128, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 2, 1, 256, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 2, 1, 384, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 2, 2, 128, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 2, 2, 256, 240, 222,,,,, +7, 7, 5, 6, 2, 2, 2, 2, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 1, 128, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 1, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 1, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 2, 128, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 2, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 2, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 3, 128, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 3, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 1, 3, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 1, 128, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 1, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 1, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 2, 128, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 2, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 2, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 3, 128, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 3, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 2, 3, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 3, 1, 128, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 3, 1, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 3, 1, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 3, 2, 128, 240, 
222,,,,, +7, 7, 5, 6, 3, 3, 3, 2, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 3, 2, 384, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 3, 3, 128, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 3, 3, 256, 240, 222,,,,, +7, 7, 5, 6, 3, 3, 3, 3, 384, 240, 222,,,,, +7, 7, 5, 7, 1, 1, 1, 1, 128, 240, 240,,,,, +7, 7, 5, 7, 1, 1, 1, 1, 256, 240, 240,,,,, +7, 7, 5, 7, 1, 1, 1, 1, 384, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 1, 1, 128, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 1, 1, 256, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 1, 1, 384, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 1, 2, 128, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 1, 2, 256, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 1, 2, 384, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 2, 1, 128, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 2, 1, 256, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 2, 1, 384, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 2, 2, 128, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 2, 2, 256, 240, 240,,,,, +7, 7, 5, 7, 2, 2, 2, 2, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 1, 128, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 1, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 1, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 2, 128, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 2, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 2, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 3, 128, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 3, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 1, 3, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 1, 128, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 1, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 1, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 2, 128, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 2, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 2, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 3, 128, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 3, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 2, 3, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 3, 1, 128, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 3, 1, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 3, 1, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 3, 2, 128, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 3, 2, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 3, 2, 384, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 3, 3, 128, 240, 240,,,,, +7, 7, 5, 
7, 3, 3, 3, 3, 256, 240, 240,,,,, +7, 7, 5, 7, 3, 3, 3, 3, 384, 240, 240,,,,, +7, 7, 6, 1, 1, 1, 1, 1, 128, 60, 42,,,,, +7, 7, 6, 1, 1, 1, 1, 1, 256, 60, 42,,,,, +7, 7, 6, 1, 1, 1, 1, 1, 384, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 1, 1, 128, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 1, 1, 256, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 1, 1, 384, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 1, 2, 128, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 1, 2, 256, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 1, 2, 384, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 2, 1, 128, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 2, 1, 256, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 2, 1, 384, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 2, 2, 128, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 2, 2, 256, 60, 42,,,,, +7, 7, 6, 1, 2, 2, 2, 2, 384, 60, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 1, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 1, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 1, 384, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 2, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 2, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 2, 384, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 3, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 3, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 1, 3, 384, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 1, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 1, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 1, 384, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 2, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 2, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 2, 384, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 3, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 3, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 2, 3, 384, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 1, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 1, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 1, 384, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 2, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 2, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 2, 384, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 3, 128, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 3, 256, 59, 42,,,,, +7, 7, 6, 1, 3, 3, 3, 3, 384, 59, 42,,,,, +7, 7, 6, 2, 1, 1, 1, 1, 128, 114, 78,,,,, +7, 7, 6, 2, 1, 1, 1, 1, 256, 114, 78,,,,, +7, 7, 6, 2, 1, 1, 1, 1, 384, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 1, 1, 
128, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 1, 1, 256, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 1, 1, 384, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 1, 2, 128, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 1, 2, 256, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 1, 2, 384, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 2, 1, 128, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 2, 1, 256, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 2, 1, 384, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 2, 2, 128, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 2, 2, 256, 114, 78,,,,, +7, 7, 6, 2, 2, 2, 2, 2, 384, 114, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 1, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 1, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 1, 384, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 2, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 2, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 2, 384, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 3, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 3, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 1, 3, 384, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 1, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 1, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 1, 384, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 2, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 2, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 2, 384, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 3, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 3, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 2, 3, 384, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 1, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 1, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 1, 384, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 2, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 2, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 2, 384, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 3, 128, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 3, 256, 112, 78,,,,, +7, 7, 6, 2, 3, 3, 3, 3, 384, 112, 78,,,,, +7, 7, 6, 3, 1, 1, 1, 1, 128, 168, 114,,,,, +7, 7, 6, 3, 1, 1, 1, 1, 256, 168, 114,,,,, +7, 7, 6, 3, 1, 1, 1, 1, 384, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 1, 1, 128, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 1, 1, 256, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 1, 1, 384, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 1, 2, 128, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 1, 2, 256, 168, 
114,,,,, +7, 7, 6, 3, 2, 2, 1, 2, 384, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 2, 1, 128, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 2, 1, 256, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 2, 1, 384, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 2, 2, 128, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 2, 2, 256, 168, 114,,,,, +7, 7, 6, 3, 2, 2, 2, 2, 384, 168, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 1, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 1, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 1, 384, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 2, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 2, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 2, 384, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 3, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 3, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 1, 3, 384, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 1, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 1, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 1, 384, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 2, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 2, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 2, 384, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 3, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 3, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 2, 3, 384, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 1, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 1, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 1, 384, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 2, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 2, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 2, 384, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 3, 128, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 3, 256, 165, 114,,,,, +7, 7, 6, 3, 3, 3, 3, 3, 384, 165, 114,,,,, +7, 7, 6, 4, 1, 1, 1, 1, 128, 222, 150,,,,, +7, 7, 6, 4, 1, 1, 1, 1, 256, 222, 150,,,,, +7, 7, 6, 4, 1, 1, 1, 1, 384, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 1, 1, 128, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 1, 1, 256, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 1, 1, 384, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 1, 2, 128, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 1, 2, 256, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 1, 2, 384, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 2, 1, 128, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 2, 1, 256, 222, 150,,,,, +7, 7, 6, 
4, 2, 2, 2, 1, 384, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 2, 2, 128, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 2, 2, 256, 222, 150,,,,, +7, 7, 6, 4, 2, 2, 2, 2, 384, 222, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 1, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 1, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 1, 384, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 2, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 2, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 2, 384, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 3, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 3, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 1, 3, 384, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 1, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 1, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 1, 384, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 2, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 2, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 2, 384, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 3, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 3, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 2, 3, 384, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 1, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 1, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 1, 384, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 2, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 2, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 2, 384, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 3, 128, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 3, 256, 218, 150,,,,, +7, 7, 6, 4, 3, 3, 3, 3, 384, 218, 150,,,,, +7, 7, 6, 5, 1, 1, 1, 1, 128, 240, 186,,,,, +7, 7, 6, 5, 1, 1, 1, 1, 256, 240, 186,,,,, +7, 7, 6, 5, 1, 1, 1, 1, 384, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 1, 1, 128, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 1, 1, 256, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 1, 1, 384, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 1, 2, 128, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 1, 2, 256, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 1, 2, 384, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 2, 1, 128, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 2, 1, 256, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 2, 1, 384, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 2, 2, 128, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 2, 2, 256, 240, 186,,,,, +7, 7, 6, 5, 2, 2, 2, 2, 384, 
240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 1, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 1, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 1, 384, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 2, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 2, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 2, 384, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 3, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 3, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 1, 3, 384, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 1, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 1, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 1, 384, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 2, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 2, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 2, 384, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 3, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 3, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 2, 3, 384, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 1, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 1, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 1, 384, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 2, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 2, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 2, 384, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 3, 128, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 3, 256, 240, 186,,,,, +7, 7, 6, 5, 3, 3, 3, 3, 384, 240, 186,,,,, +7, 7, 6, 6, 1, 1, 1, 1, 128, 240, 222,,,,, +7, 7, 6, 6, 1, 1, 1, 1, 256, 240, 222,,,,, +7, 7, 6, 6, 1, 1, 1, 1, 384, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 1, 1, 128, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 1, 1, 256, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 1, 1, 384, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 1, 2, 128, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 1, 2, 256, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 1, 2, 384, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 2, 1, 128, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 2, 1, 256, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 2, 1, 384, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 2, 2, 128, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 2, 2, 256, 240, 222,,,,, +7, 7, 6, 6, 2, 2, 2, 2, 384, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 1, 1, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 1, 1, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 1, 1, 384, 240, 222,,,,, +7, 
7, 6, 6, 3, 3, 1, 2, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 1, 2, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 1, 2, 384, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 1, 3, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 1, 3, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 1, 3, 384, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 1, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 1, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 1, 384, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 2, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 2, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 2, 384, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 3, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 3, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 2, 3, 384, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 1, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 1, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 1, 384, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 2, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 2, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 2, 384, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 3, 128, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 3, 256, 240, 222,,,,, +7, 7, 6, 6, 3, 3, 3, 3, 384, 240, 222,,,,, +7, 7, 6, 7, 1, 1, 1, 1, 128, 240, 240,,,,, +7, 7, 6, 7, 1, 1, 1, 1, 256, 240, 240,,,,, +7, 7, 6, 7, 1, 1, 1, 1, 384, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 1, 1, 128, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 1, 1, 256, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 1, 1, 384, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 1, 2, 128, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 1, 2, 256, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 1, 2, 384, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 2, 1, 128, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 2, 1, 256, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 2, 1, 384, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 2, 2, 128, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 2, 2, 256, 240, 240,,,,, +7, 7, 6, 7, 2, 2, 2, 2, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 1, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 1, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 1, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 2, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 2, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 2, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 
3, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 3, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 1, 3, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 1, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 1, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 1, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 2, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 2, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 2, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 3, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 3, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 2, 3, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 1, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 1, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 1, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 2, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 2, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 2, 384, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 3, 128, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 3, 256, 240, 240,,,,, +7, 7, 6, 7, 3, 3, 3, 3, 384, 240, 240,,,,, +7, 7, 7, 1, 1, 1, 1, 1, 128, 60, 42,,,,, +7, 7, 7, 1, 1, 1, 1, 1, 256, 60, 42,,,,, +7, 7, 7, 1, 1, 1, 1, 1, 384, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 1, 1, 128, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 1, 1, 256, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 1, 1, 384, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 1, 2, 128, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 1, 2, 256, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 1, 2, 384, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 2, 1, 128, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 2, 1, 256, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 2, 1, 384, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 2, 2, 128, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 2, 2, 256, 60, 42,,,,, +7, 7, 7, 1, 2, 2, 2, 2, 384, 60, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 1, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 1, 256, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 1, 384, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 2, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 2, 256, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 2, 384, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 3, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 3, 256, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 1, 3, 384, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 2, 1, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 2, 1, 256, 59, 42,,,,, +7, 
7, 7, 1, 3, 3, 2, 1, 384, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 2, 2, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 2, 2, 256, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 2, 2, 384, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 2, 3, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 2, 3, 256, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 2, 3, 384, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 1, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 1, 256, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 1, 384, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 2, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 2, 256, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 2, 384, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 3, 128, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 3, 256, 59, 42,,,,, +7, 7, 7, 1, 3, 3, 3, 3, 384, 59, 42,,,,, +7, 7, 7, 2, 1, 1, 1, 1, 128, 114, 78,,,,, +7, 7, 7, 2, 1, 1, 1, 1, 256, 114, 78,,,,, +7, 7, 7, 2, 1, 1, 1, 1, 384, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 1, 1, 128, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 1, 1, 256, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 1, 1, 384, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 1, 2, 128, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 1, 2, 256, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 1, 2, 384, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 2, 1, 128, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 2, 1, 256, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 2, 1, 384, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 2, 2, 128, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 2, 2, 256, 114, 78,,,,, +7, 7, 7, 2, 2, 2, 2, 2, 384, 114, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 1, 128, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 1, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 1, 384, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 2, 128, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 2, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 2, 384, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 3, 128, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 3, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 1, 3, 384, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 2, 1, 128, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 2, 1, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 2, 1, 384, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 2, 2, 128, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 2, 2, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 2, 2, 384, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 2, 3, 128, 112, 
78,,,,, +7, 7, 7, 2, 3, 3, 2, 3, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 2, 3, 384, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 1, 128, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 1, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 1, 384, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 2, 128, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 2, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 2, 384, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 3, 128, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 3, 256, 112, 78,,,,, +7, 7, 7, 2, 3, 3, 3, 3, 384, 112, 78,,,,, +7, 7, 7, 3, 1, 1, 1, 1, 128, 168, 114,,,,, +7, 7, 7, 3, 1, 1, 1, 1, 256, 168, 114,,,,, +7, 7, 7, 3, 1, 1, 1, 1, 384, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 1, 1, 128, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 1, 1, 256, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 1, 1, 384, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 1, 2, 128, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 1, 2, 256, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 1, 2, 384, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 2, 1, 128, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 2, 1, 256, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 2, 1, 384, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 2, 2, 128, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 2, 2, 256, 168, 114,,,,, +7, 7, 7, 3, 2, 2, 2, 2, 384, 168, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 1, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 1, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 1, 384, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 2, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 2, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 2, 384, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 3, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 3, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 1, 3, 384, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 1, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 1, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 1, 384, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 2, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 2, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 2, 384, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 3, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 3, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 2, 3, 384, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 1, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 
1, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 1, 384, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 2, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 2, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 2, 384, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 3, 128, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 3, 256, 165, 114,,,,, +7, 7, 7, 3, 3, 3, 3, 3, 384, 165, 114,,,,, +7, 7, 7, 4, 1, 1, 1, 1, 128, 222, 150,,,,, +7, 7, 7, 4, 1, 1, 1, 1, 256, 222, 150,,,,, +7, 7, 7, 4, 1, 1, 1, 1, 384, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 1, 1, 128, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 1, 1, 256, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 1, 1, 384, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 1, 2, 128, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 1, 2, 256, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 1, 2, 384, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 2, 1, 128, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 2, 1, 256, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 2, 1, 384, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 2, 2, 128, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 2, 2, 256, 222, 150,,,,, +7, 7, 7, 4, 2, 2, 2, 2, 384, 222, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 1, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 1, 256, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 1, 384, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 2, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 2, 256, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 2, 384, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 3, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 3, 256, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 1, 3, 384, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 1, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 1, 256, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 1, 384, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 2, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 2, 256, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 2, 384, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 3, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 3, 256, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 2, 3, 384, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 3, 1, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 3, 1, 256, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 3, 1, 384, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 3, 2, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 3, 2, 256, 218, 
150,,,,, +7, 7, 7, 4, 3, 3, 3, 2, 384, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 3, 3, 128, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 3, 3, 256, 218, 150,,,,, +7, 7, 7, 4, 3, 3, 3, 3, 384, 218, 150,,,,, +7, 7, 7, 5, 1, 1, 1, 1, 128, 240, 186,,,,, +7, 7, 7, 5, 1, 1, 1, 1, 256, 240, 186,,,,, +7, 7, 7, 5, 1, 1, 1, 1, 384, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 1, 1, 128, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 1, 1, 256, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 1, 1, 384, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 1, 2, 128, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 1, 2, 256, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 1, 2, 384, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 2, 1, 128, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 2, 1, 256, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 2, 1, 384, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 2, 2, 128, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 2, 2, 256, 240, 186,,,,, +7, 7, 7, 5, 2, 2, 2, 2, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 1, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 1, 256, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 1, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 2, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 2, 256, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 2, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 3, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 3, 256, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 1, 3, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 1, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 1, 256, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 1, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 2, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 2, 256, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 2, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 3, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 3, 256, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 2, 3, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 3, 1, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 3, 1, 256, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 3, 1, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 3, 2, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 3, 2, 256, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 3, 2, 384, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 3, 3, 128, 240, 186,,,,, +7, 7, 7, 5, 3, 3, 3, 3, 256, 240, 186,,,,, +7, 7, 7, 
5, 3, 3, 3, 3, 384, 240, 186,,,,, +7, 7, 7, 6, 1, 1, 1, 1, 128, 240, 222,,,,, +7, 7, 7, 6, 1, 1, 1, 1, 256, 240, 222,,,,, +7, 7, 7, 6, 1, 1, 1, 1, 384, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 1, 1, 128, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 1, 1, 256, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 1, 1, 384, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 1, 2, 128, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 1, 2, 256, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 1, 2, 384, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 2, 1, 128, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 2, 1, 256, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 2, 1, 384, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 2, 2, 128, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 2, 2, 256, 240, 222,,,,, +7, 7, 7, 6, 2, 2, 2, 2, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 1, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 1, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 1, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 2, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 2, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 2, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 3, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 3, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 1, 3, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 1, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 1, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 1, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 2, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 2, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 2, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 3, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 3, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 2, 3, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 1, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 1, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 1, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 2, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 2, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 2, 384, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 3, 128, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 3, 256, 240, 222,,,,, +7, 7, 7, 6, 3, 3, 3, 3, 384, 240, 222,,,,, +7, 7, 7, 7, 1, 1, 1, 1, 128, 240, 240,,,,, +7, 7, 7, 7, 1, 1, 1, 1, 256, 240, 240,,,,, +7, 7, 7, 7, 1, 1, 1, 1, 384, 
240, 240,,,,, +7, 7, 7, 7, 2, 2, 1, 1, 128, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 1, 1, 256, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 1, 1, 384, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 1, 2, 128, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 1, 2, 256, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 1, 2, 384, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 2, 1, 128, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 2, 1, 256, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 2, 1, 384, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 2, 2, 128, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 2, 2, 256, 240, 240,,,,, +7, 7, 7, 7, 2, 2, 2, 2, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 1, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 1, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 1, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 2, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 2, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 2, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 3, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 3, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 1, 3, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 1, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 1, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 1, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 2, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 2, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 2, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 3, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 3, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 2, 3, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 1, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 1, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 1, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 2, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 2, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 2, 384, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 3, 128, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 3, 256, 240, 240,,,,, +7, 7, 7, 7, 3, 3, 3, 3, 384, 240, 240,,,,, \ No newline at end of file diff --git a/docs/OV_Runtime_UG/supported_plugins/files/GNA_Maximum_Input_Tensor_Widths_i8.csv b/docs/OV_Runtime_UG/supported_plugins/files/GNA_Maximum_Input_Tensor_Widths_i8.csv new file mode 100644 index 00000000000..64ca2b2cb93 --- 
/dev/null +++ b/docs/OV_Runtime_UG/supported_plugins/files/GNA_Maximum_Input_Tensor_Widths_i8.csv @@ -0,0 +1,2353 @@ +KH, KW, SH, SW, PH, PW, SH, SW, H, W (Ci=8/Co=256), W (Ci=16/Co=256), W (Ci=32/Co=256), W (Ci=64/Co=256), W (Ci=128/Co=256),W (Ci=256/Co=256), W (Ci=384/Co=256) +1, 1, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 240, 240 +1, 1, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 240, 170 +1, 1, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240, 170, 113 +1, 2, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 240, 240 +1, 2, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 240, 170 +1, 2, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240, 170, 113 +1, 2, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 240, 240 +1, 2, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 240, 170 +1, 2, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240, 170, 113 +1, 3, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 3, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 3, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 3, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 3, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 3, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 3, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 3, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 3, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 4, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 4, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 4, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 4, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 4, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 4, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 4, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 4, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 4, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 4, 1, 4, 1, 1, 1, 1, 
384, 240, 240, 240, 240, 240,, +1, 5, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 5, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 5, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 5, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 5, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 5, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 5, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 5, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 5, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +1, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +1, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +1, 6, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 6, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 6, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 6, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 6, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 6, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 6, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 6, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 6, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 7, 1, 1, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 7, 1, 1, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 7, 1, 1, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 7, 1, 2, 1, 1, 1, 1, 128, 240, 
240, 240, 240,,, +1, 7, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 7, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 7, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 7, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 7, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +1, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +1, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +1, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 1, 1, 1, 1, 1, 1, 1, 128, 192, 192, 192, 192, 192, 192, 192 +2, 1, 1, 1, 1, 1, 1, 1, 256, 192, 192, 192, 192, 192, 192, 170 +2, 1, 1, 1, 1, 1, 1, 1, 384, 192, 192, 192, 192, 192, 170, 113 +2, 1, 2, 1, 1, 1, 1, 1, 128, 192, 192, 192, 192, 192, 192, 192 +2, 1, 2, 1, 1, 1, 1, 1, 256, 192, 192, 192, 192, 192, 192, 170 +2, 1, 2, 1, 1, 1, 1, 1, 384, 192, 192, 192, 192, 192, 170, 113 +2, 2, 1, 1, 1, 1, 1, 1, 128, 193, 193, 193, 193, 193, 193, 129 +2, 2, 1, 1, 1, 1, 1, 1, 256, 193, 193, 193, 193, 193, 193, 129 +2, 2, 1, 1, 1, 1, 1, 1, 384, 193, 193, 193, 193, 193, 170, 113 +2, 2, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 240, 240 +2, 2, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 240, 170 +2, 2, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240, 170, 113 +2, 2, 2, 1, 1, 1, 1, 1, 128, 193, 193, 193, 193, 193, 193, 129 +2, 2, 2, 1, 1, 1, 1, 1, 256, 193, 193, 193, 193, 193, 193, 129 +2, 2, 2, 1, 1, 1, 1, 1, 384, 193, 193, 193, 193, 193, 170, 113 +2, 2, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 240, 240 +2, 2, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 240, 170 +2, 2, 2, 
2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240, 170, 113 +2, 3, 1, 1, 1, 1, 1, 1, 128, 194, 194, 194, 194, 194,, +2, 3, 1, 1, 1, 1, 1, 1, 256, 194, 194, 194, 194, 194,, +2, 3, 1, 1, 1, 1, 1, 1, 384, 194, 194, 194, 194, 194,, +2, 3, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 3, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 3, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 3, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 3, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 3, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 3, 2, 1, 1, 1, 1, 1, 128, 194, 194, 194, 194, 194,, +2, 3, 2, 1, 1, 1, 1, 1, 256, 194, 194, 194, 194, 194,, +2, 3, 2, 1, 1, 1, 1, 1, 384, 194, 194, 194, 194, 194,, +2, 3, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 3, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 3, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 3, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 3, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 3, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 4, 1, 1, 1, 1, 1, 1, 128, 195, 195, 195, 195, 195,, +2, 4, 1, 1, 1, 1, 1, 1, 256, 195, 195, 195, 195, 195,, +2, 4, 1, 1, 1, 1, 1, 1, 384, 195, 195, 195, 195, 195,, +2, 4, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 4, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 4, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 4, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 4, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 4, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 4, 2, 1, 1, 1, 1, 1, 128, 195, 195, 195, 195, 195,, +2, 4, 2, 1, 1, 1, 1, 1, 256, 195, 195, 195, 195, 195,, +2, 4, 2, 1, 1, 1, 1, 1, 384, 195, 195, 195, 195, 195,, +2, 4, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 4, 2, 2, 1, 1, 1, 1, 256, 240, 240, 
240, 240, 240,, +2, 4, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 4, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 4, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 4, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 5, 1, 1, 1, 1, 1, 1, 128, 196, 196, 196, 196, 132,, +2, 5, 1, 1, 1, 1, 1, 1, 256, 196, 196, 196, 196, 132,, +2, 5, 1, 1, 1, 1, 1, 1, 384, 196, 196, 196, 196, 132,, +2, 5, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 5, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 5, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 5, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 5, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 5, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 5, 2, 1, 1, 1, 1, 1, 128, 196, 196, 196, 196, 132,, +2, 5, 2, 1, 1, 1, 1, 1, 256, 196, 196, 196, 196, 132,, +2, 5, 2, 1, 1, 1, 1, 1, 384, 196, 196, 196, 196, 132,, +2, 5, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 5, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 5, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 5, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 5, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 5, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +2, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +2, 5, 2, 5, 1, 1, 1, 
1, 256, 240, 240, 240, 240, 240,, +2, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +2, 6, 1, 1, 1, 1, 1, 1, 128, 197, 197, 197, 197,,, +2, 6, 1, 1, 1, 1, 1, 1, 256, 197, 197, 197, 197,,, +2, 6, 1, 1, 1, 1, 1, 1, 384, 197, 197, 197, 197,,, +2, 6, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 2, 1, 1, 1, 1, 1, 128, 197, 197, 197, 197,,, +2, 6, 2, 1, 1, 1, 1, 1, 256, 197, 197, 197, 197,,, +2, 6, 2, 1, 1, 1, 1, 1, 384, 197, 197, 197, 197,,, +2, 6, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 1, 1, 1, 1, 1, 1, 128, 198, 
198, 198, 198,,, +2, 7, 1, 1, 1, 1, 1, 1, 256, 198, 198, 198, 198,,, +2, 7, 1, 1, 1, 1, 1, 1, 384, 198, 198, 198, 198,,, +2, 7, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 2, 1, 1, 1, 1, 1, 128, 198, 198, 198, 198,,, +2, 7, 2, 1, 1, 1, 1, 1, 256, 198, 198, 198, 198,,, +2, 7, 2, 1, 1, 1, 1, 1, 384, 198, 198, 198, 198,,, +2, 7, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +2, 7, 
2, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +2, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +2, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 1, 1, 1, 1, 1, 1, 1, 128, 128, 128, 128, 128, 128, 128, 85 +3, 1, 1, 1, 1, 1, 1, 1, 256, 128, 128, 128, 128, 128, 128, 85 +3, 1, 1, 1, 1, 1, 1, 1, 384, 128, 128, 128, 128, 128, 128, 85 +3, 1, 2, 1, 1, 1, 1, 1, 128, 128, 128, 128, 128, 128, 128, 85 +3, 1, 2, 1, 1, 1, 1, 1, 256, 128, 128, 128, 128, 128, 128, 85 +3, 1, 2, 1, 1, 1, 1, 1, 384, 128, 128, 128, 128, 128, 128, 85 +3, 1, 3, 1, 1, 1, 1, 1, 128, 128, 128, 128, 128, 128, 128, 85 +3, 1, 3, 1, 1, 1, 1, 1, 256, 128, 128, 128, 128, 128, 128, 85 +3, 1, 3, 1, 1, 1, 1, 1, 384, 128, 128, 128, 128, 128, 128, 85 +3, 2, 1, 1, 1, 1, 1, 1, 128, 129, 129, 129, 129, 129, 86, 43 +3, 2, 1, 1, 1, 1, 1, 1, 256, 129, 129, 129, 129, 129, 86, 43 +3, 2, 1, 1, 1, 1, 1, 1, 384, 129, 129, 129, 129, 129, 86, 43 +3, 2, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 171, 85 +3, 2, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 171, 85 +3, 2, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240, 170, 85 +3, 2, 2, 1, 1, 1, 1, 1, 128, 129, 129, 129, 129, 129, 86, 43 +3, 2, 2, 1, 1, 1, 1, 1, 256, 129, 129, 129, 129, 129, 86, 43 +3, 2, 2, 1, 1, 1, 1, 1, 384, 129, 129, 129, 129, 129, 86, 43 +3, 2, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 171, 85 +3, 2, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 171, 85 +3, 2, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240, 170, 85 +3, 2, 3, 1, 1, 1, 1, 1, 128, 129, 129, 129, 129, 129, 86, 43 +3, 2, 3, 1, 1, 1, 1, 1, 256, 129, 129, 129, 129, 129, 86, 43 +3, 2, 3, 1, 1, 1, 1, 1, 384, 129, 129, 129, 129, 129, 86, 43 +3, 2, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240, 171, 85 +3, 2, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240, 171, 85 +3, 2, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240, 170, 85 +3, 3, 1, 1, 1, 1, 1, 1, 128, 130, 130, 130, 130, 87,, +3, 3, 1, 1, 1, 1, 1, 1, 256, 130, 130, 130, 130, 87,, +3, 3, 1, 1, 1, 1, 1, 1, 384, 130, 130, 130, 
130, 87,, +3, 3, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 172,, +3, 3, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 172,, +3, 3, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 172,, +3, 3, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 3, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 3, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 3, 2, 1, 1, 1, 1, 1, 128, 130, 130, 130, 130, 87,, +3, 3, 2, 1, 1, 1, 1, 1, 256, 130, 130, 130, 130, 87,, +3, 3, 2, 1, 1, 1, 1, 1, 384, 130, 130, 130, 130, 87,, +3, 3, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 172,, +3, 3, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 172,, +3, 3, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 172,, +3, 3, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 3, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 3, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 3, 3, 1, 1, 1, 1, 1, 128, 130, 130, 130, 130, 87,, +3, 3, 3, 1, 1, 1, 1, 1, 256, 130, 130, 130, 130, 87,, +3, 3, 3, 1, 1, 1, 1, 1, 384, 130, 130, 130, 130, 87,, +3, 3, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 172,, +3, 3, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 172,, +3, 3, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 172,, +3, 3, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 3, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 3, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 4, 1, 1, 1, 1, 1, 1, 128, 131, 131, 131, 131, 88,, +3, 4, 1, 1, 1, 1, 1, 1, 256, 131, 131, 131, 131, 88,, +3, 4, 1, 1, 1, 1, 1, 1, 384, 131, 131, 131, 131, 88,, +3, 4, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 173,, +3, 4, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 173,, +3, 4, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 173,, +3, 4, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 4, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 4, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 4, 1, 4, 1, 1, 1, 1, 384, 240, 
240, 240, 240, 240,, +3, 4, 2, 1, 1, 1, 1, 1, 128, 131, 131, 131, 131, 88,, +3, 4, 2, 1, 1, 1, 1, 1, 256, 131, 131, 131, 131, 88,, +3, 4, 2, 1, 1, 1, 1, 1, 384, 131, 131, 131, 131, 88,, +3, 4, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 173,, +3, 4, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 173,, +3, 4, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 173,, +3, 4, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 4, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 4, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 4, 3, 1, 1, 1, 1, 1, 128, 131, 131, 131, 131, 88,, +3, 4, 3, 1, 1, 1, 1, 1, 256, 131, 131, 131, 131, 88,, +3, 4, 3, 1, 1, 1, 1, 1, 384, 131, 131, 131, 131, 88,, +3, 4, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 173,, +3, 4, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 173,, +3, 4, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 173,, +3, 4, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 4, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 4, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 4, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 4, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 4, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 5, 1, 1, 1, 1, 1, 1, 128, 132, 132, 132, 132, 89,, +3, 5, 1, 1, 1, 1, 1, 1, 256, 132, 132, 132, 132, 89,, +3, 5, 1, 1, 1, 1, 1, 1, 384, 132, 132, 132, 132, 89,, +3, 5, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 174,, +3, 5, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 174,, +3, 5, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 174,, +3, 5, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 1, 4, 1, 1, 1, 1, 
384, 240, 240, 240, 240, 240,, +3, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 5, 2, 1, 1, 1, 1, 1, 128, 132, 132, 132, 132, 89,, +3, 5, 2, 1, 1, 1, 1, 1, 256, 132, 132, 132, 132, 89,, +3, 5, 2, 1, 1, 1, 1, 1, 384, 132, 132, 132, 132, 89,, +3, 5, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 174,, +3, 5, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 174,, +3, 5, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 174,, +3, 5, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 5, 3, 1, 1, 1, 1, 1, 128, 132, 132, 132, 132, 89,, +3, 5, 3, 1, 1, 1, 1, 1, 256, 132, 132, 132, 132, 89,, +3, 5, 3, 1, 1, 1, 1, 1, 384, 132, 132, 132, 132, 89,, +3, 5, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 240, 174,, +3, 5, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 240, 174,, +3, 5, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 240, 174,, +3, 5, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 5, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +3, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +3, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +3, 6, 1, 1, 1, 1, 1, 1, 128, 133, 133, 133, 90,,, +3, 6, 1, 1, 1, 1, 1, 1, 256, 133, 133, 133, 90,,, +3, 6, 1, 1, 1, 1, 1, 1, 
384, 133, 133, 133, 90,,, +3, 6, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 175,,, +3, 6, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 175,,, +3, 6, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 175,,, +3, 6, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 2, 1, 1, 1, 1, 1, 128, 133, 133, 133, 90,,, +3, 6, 2, 1, 1, 1, 1, 1, 256, 133, 133, 133, 90,,, +3, 6, 2, 1, 1, 1, 1, 1, 384, 133, 133, 133, 90,,, +3, 6, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 175,,, +3, 6, 2, 2, 1, 1, 1, 1, 256, 240, 240, 240, 175,,, +3, 6, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 175,,, +3, 6, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 3, 1, 1, 1, 1, 1, 128, 133, 133, 133, 90,,, +3, 6, 3, 1, 1, 1, 1, 1, 256, 133, 133, 133, 90,,, +3, 6, 3, 1, 1, 1, 1, 1, 384, 133, 133, 133, 90,,, +3, 6, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 175,,, +3, 6, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 175,,, +3, 
6, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 175,,, +3, 6, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 6, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 1, 1, 1, 1, 1, 1, 128, 134, 134, 134, 91,,, +3, 7, 1, 1, 1, 1, 1, 1, 256, 134, 134, 134, 91,,, +3, 7, 1, 1, 1, 1, 1, 1, 384, 134, 134, 134, 91,,, +3, 7, 1, 2, 1, 1, 1, 1, 128, 240, 240, 240, 176,,, +3, 7, 1, 2, 1, 1, 1, 1, 256, 240, 240, 240, 176,,, +3, 7, 1, 2, 1, 1, 1, 1, 384, 240, 240, 240, 176,,, +3, 7, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 2, 1, 1, 1, 1, 1, 128, 134, 134, 134, 91,,, +3, 7, 2, 1, 1, 1, 1, 1, 256, 134, 134, 134, 91,,, +3, 7, 2, 1, 1, 1, 1, 1, 384, 134, 134, 134, 91,,, +3, 7, 2, 2, 1, 1, 1, 1, 128, 240, 240, 240, 176,,, +3, 7, 2, 2, 1, 1, 1, 1, 256, 
240, 240, 240, 176,,, +3, 7, 2, 2, 1, 1, 1, 1, 384, 240, 240, 240, 176,,, +3, 7, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 3, 1, 1, 1, 1, 1, 128, 134, 134, 134, 91,,, +3, 7, 3, 1, 1, 1, 1, 1, 256, 134, 134, 134, 91,,, +3, 7, 3, 1, 1, 1, 1, 1, 384, 134, 134, 134, 91,,, +3, 7, 3, 2, 1, 1, 1, 1, 128, 240, 240, 240, 176,,, +3, 7, 3, 2, 1, 1, 1, 1, 256, 240, 240, 240, 176,,, +3, 7, 3, 2, 1, 1, 1, 1, 384, 240, 240, 240, 176,,, +3, 7, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +3, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +3, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +3, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 1, 1, 1, 1, 1, 1, 1, 128, 96, 96, 96, 96, 96, 96, 64 
+4, 1, 1, 1, 1, 1, 1, 1, 256, 96, 96, 96, 96, 96, 96, 64 +4, 1, 1, 1, 1, 1, 1, 1, 384, 96, 96, 96, 96, 96, 96, 64 +4, 1, 2, 1, 1, 1, 1, 1, 128, 96, 96, 96, 96, 96, 96, 64 +4, 1, 2, 1, 1, 1, 1, 1, 256, 96, 96, 96, 96, 96, 96, 64 +4, 1, 2, 1, 1, 1, 1, 1, 384, 96, 96, 96, 96, 96, 96, 64 +4, 1, 3, 1, 1, 1, 1, 1, 128, 96, 96, 96, 96, 96, 96, 64 +4, 1, 3, 1, 1, 1, 1, 1, 256, 96, 96, 96, 96, 96, 96, 64 +4, 1, 3, 1, 1, 1, 1, 1, 384, 96, 96, 96, 96, 96, 96, 64 +4, 1, 4, 1, 1, 1, 1, 1, 128, 96, 96, 96, 96, 96, 96, 64 +4, 1, 4, 1, 1, 1, 1, 1, 256, 96, 96, 96, 96, 96, 96, 64 +4, 1, 4, 1, 1, 1, 1, 1, 384, 96, 96, 96, 96, 96, 96, 64 +4, 2, 1, 1, 1, 1, 1, 1, 128, 97, 97, 97, 97, 97, 65, 33 +4, 2, 1, 1, 1, 1, 1, 1, 256, 97, 97, 97, 97, 97, 65, 33 +4, 2, 1, 1, 1, 1, 1, 1, 384, 97, 97, 97, 97, 97, 65, 33 +4, 2, 1, 2, 1, 1, 1, 1, 128, 193, 193, 193, 193, 193, 129, 65 +4, 2, 1, 2, 1, 1, 1, 1, 256, 193, 193, 193, 193, 193, 129, 65 +4, 2, 1, 2, 1, 1, 1, 1, 384, 193, 193, 193, 193, 193, 129, 65 +4, 2, 2, 1, 1, 1, 1, 1, 128, 97, 97, 97, 97, 97, 65, 33 +4, 2, 2, 1, 1, 1, 1, 1, 256, 97, 97, 97, 97, 97, 65, 33 +4, 2, 2, 1, 1, 1, 1, 1, 384, 97, 97, 97, 97, 97, 65, 33 +4, 2, 2, 2, 1, 1, 1, 1, 128, 193, 193, 193, 193, 193, 129, 65 +4, 2, 2, 2, 1, 1, 1, 1, 256, 193, 193, 193, 193, 193, 129, 65 +4, 2, 2, 2, 1, 1, 1, 1, 384, 193, 193, 193, 193, 193, 129, 65 +4, 2, 3, 1, 1, 1, 1, 1, 128, 97, 97, 97, 97, 97, 65, 33 +4, 2, 3, 1, 1, 1, 1, 1, 256, 97, 97, 97, 97, 97, 65, 33 +4, 2, 3, 1, 1, 1, 1, 1, 384, 97, 97, 97, 97, 97, 65, 33 +4, 2, 3, 2, 1, 1, 1, 1, 128, 193, 193, 193, 193, 193, 129, 65 +4, 2, 3, 2, 1, 1, 1, 1, 256, 193, 193, 193, 193, 193, 129, 65 +4, 2, 3, 2, 1, 1, 1, 1, 384, 193, 193, 193, 193, 193, 129, 65 +4, 2, 4, 1, 1, 1, 1, 1, 128, 97, 97, 97, 97, 97, 65, 33 +4, 2, 4, 1, 1, 1, 1, 1, 256, 97, 97, 97, 97, 97, 65, 33 +4, 2, 4, 1, 1, 1, 1, 1, 384, 97, 97, 97, 97, 97, 65, 33 +4, 2, 4, 2, 1, 1, 1, 1, 128, 193, 193, 193, 193, 193, 129, 65 +4, 2, 4, 2, 1, 1, 1, 1, 256, 193, 193, 193, 193, 193, 
129, 65 +4, 2, 4, 2, 1, 1, 1, 1, 384, 193, 193, 193, 193, 193, 129, 65 +4, 3, 1, 1, 1, 1, 1, 1, 128, 98, 98, 98, 98, 66,, +4, 3, 1, 1, 1, 1, 1, 1, 256, 98, 98, 98, 98, 66,, +4, 3, 1, 1, 1, 1, 1, 1, 384, 98, 98, 98, 98, 66,, +4, 3, 1, 2, 1, 1, 1, 1, 128, 194, 194, 194, 194, 130,, +4, 3, 1, 2, 1, 1, 1, 1, 256, 194, 194, 194, 194, 130,, +4, 3, 1, 2, 1, 1, 1, 1, 384, 194, 194, 194, 194, 130,, +4, 3, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 194,, +4, 3, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 194,, +4, 3, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 194,, +4, 3, 2, 1, 1, 1, 1, 1, 128, 98, 98, 98, 98, 66,, +4, 3, 2, 1, 1, 1, 1, 1, 256, 98, 98, 98, 98, 66,, +4, 3, 2, 1, 1, 1, 1, 1, 384, 98, 98, 98, 98, 66,, +4, 3, 2, 2, 1, 1, 1, 1, 128, 194, 194, 194, 194, 130,, +4, 3, 2, 2, 1, 1, 1, 1, 256, 194, 194, 194, 194, 130,, +4, 3, 2, 2, 1, 1, 1, 1, 384, 194, 194, 194, 194, 130,, +4, 3, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 194,, +4, 3, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 194,, +4, 3, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 194,, +4, 3, 3, 1, 1, 1, 1, 1, 128, 98, 98, 98, 98, 66,, +4, 3, 3, 1, 1, 1, 1, 1, 256, 98, 98, 98, 98, 66,, +4, 3, 3, 1, 1, 1, 1, 1, 384, 98, 98, 98, 98, 66,, +4, 3, 3, 2, 1, 1, 1, 1, 128, 194, 194, 194, 194, 130,, +4, 3, 3, 2, 1, 1, 1, 1, 256, 194, 194, 194, 194, 130,, +4, 3, 3, 2, 1, 1, 1, 1, 384, 194, 194, 194, 194, 130,, +4, 3, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 194,, +4, 3, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 194,, +4, 3, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 194,, +4, 3, 4, 1, 1, 1, 1, 1, 128, 98, 98, 98, 98, 66,, +4, 3, 4, 1, 1, 1, 1, 1, 256, 98, 98, 98, 98, 66,, +4, 3, 4, 1, 1, 1, 1, 1, 384, 98, 98, 98, 98, 66,, +4, 3, 4, 2, 1, 1, 1, 1, 128, 194, 194, 194, 194, 130,, +4, 3, 4, 2, 1, 1, 1, 1, 256, 194, 194, 194, 194, 130,, +4, 3, 4, 2, 1, 1, 1, 1, 384, 194, 194, 194, 194, 130,, +4, 3, 4, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 194,, +4, 3, 4, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 194,, +4, 3, 4, 3, 1, 1, 1, 1, 
384, 240, 240, 240, 240, 194,, +4, 4, 1, 1, 1, 1, 1, 1, 128, 99, 99, 99, 99, 67,, +4, 4, 1, 1, 1, 1, 1, 1, 256, 99, 99, 99, 99, 67,, +4, 4, 1, 1, 1, 1, 1, 1, 384, 99, 99, 99, 99, 67,, +4, 4, 1, 2, 1, 1, 1, 1, 128, 195, 195, 195, 195, 131,, +4, 4, 1, 2, 1, 1, 1, 1, 256, 195, 195, 195, 195, 131,, +4, 4, 1, 2, 1, 1, 1, 1, 384, 195, 195, 195, 195, 131,, +4, 4, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 195,, +4, 4, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 195,, +4, 4, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 195,, +4, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +4, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +4, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +4, 4, 2, 1, 1, 1, 1, 1, 128, 99, 99, 99, 99, 67,, +4, 4, 2, 1, 1, 1, 1, 1, 256, 99, 99, 99, 99, 67,, +4, 4, 2, 1, 1, 1, 1, 1, 384, 99, 99, 99, 99, 67,, +4, 4, 2, 2, 1, 1, 1, 1, 128, 195, 195, 195, 195, 131,, +4, 4, 2, 2, 1, 1, 1, 1, 256, 195, 195, 195, 195, 131,, +4, 4, 2, 2, 1, 1, 1, 1, 384, 195, 195, 195, 195, 131,, +4, 4, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 195,, +4, 4, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 195,, +4, 4, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 195,, +4, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +4, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +4, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +4, 4, 3, 1, 1, 1, 1, 1, 128, 99, 99, 99, 99, 67,, +4, 4, 3, 1, 1, 1, 1, 1, 256, 99, 99, 99, 99, 67,, +4, 4, 3, 1, 1, 1, 1, 1, 384, 99, 99, 99, 99, 67,, +4, 4, 3, 2, 1, 1, 1, 1, 128, 195, 195, 195, 195, 131,, +4, 4, 3, 2, 1, 1, 1, 1, 256, 195, 195, 195, 195, 131,, +4, 4, 3, 2, 1, 1, 1, 1, 384, 195, 195, 195, 195, 131,, +4, 4, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 195,, +4, 4, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 195,, +4, 4, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 195,, +4, 4, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +4, 4, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +4, 4, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 
240,, +4, 4, 4, 1, 1, 1, 1, 1, 128, 99, 99, 99, 99, 67,, +4, 4, 4, 1, 1, 1, 1, 1, 256, 99, 99, 99, 99, 67,, +4, 4, 4, 1, 1, 1, 1, 1, 384, 99, 99, 99, 99, 67,, +4, 4, 4, 2, 1, 1, 1, 1, 128, 195, 195, 195, 195, 131,, +4, 4, 4, 2, 1, 1, 1, 1, 256, 195, 195, 195, 195, 131,, +4, 4, 4, 2, 1, 1, 1, 1, 384, 195, 195, 195, 195, 131,, +4, 4, 4, 3, 1, 1, 1, 1, 128, 240, 240, 240, 240, 195,, +4, 4, 4, 3, 1, 1, 1, 1, 256, 240, 240, 240, 240, 195,, +4, 4, 4, 3, 1, 1, 1, 1, 384, 240, 240, 240, 240, 195,, +4, 4, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 240,, +4, 4, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 240,, +4, 4, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 240,, +4, 5, 1, 1, 1, 1, 1, 1, 128, 100, 100, 100, 68, 36,, +4, 5, 1, 1, 1, 1, 1, 1, 256, 100, 100, 100, 68, 36,, +4, 5, 1, 1, 1, 1, 1, 1, 384, 100, 100, 100, 68, 36,, +4, 5, 1, 2, 1, 1, 1, 1, 128, 196, 196, 196, 132, 68,, +4, 5, 1, 2, 1, 1, 1, 1, 256, 196, 196, 196, 132, 68,, +4, 5, 1, 2, 1, 1, 1, 1, 384, 196, 196, 196, 132, 68,, +4, 5, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 196, 100,, +4, 5, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 196, 100,, +4, 5, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 196, 100,, +4, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 132,, +4, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 132,, +4, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 132,, +4, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 164,, +4, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 164,, +4, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 164,, +4, 5, 2, 1, 1, 1, 1, 1, 128, 100, 100, 100, 68, 36,, +4, 5, 2, 1, 1, 1, 1, 1, 256, 100, 100, 100, 68, 36,, +4, 5, 2, 1, 1, 1, 1, 1, 384, 100, 100, 100, 68, 36,, +4, 5, 2, 2, 1, 1, 1, 1, 128, 196, 196, 196, 132, 68,, +4, 5, 2, 2, 1, 1, 1, 1, 256, 196, 196, 196, 132, 68,, +4, 5, 2, 2, 1, 1, 1, 1, 384, 196, 196, 196, 132, 68,, +4, 5, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 196, 100,, +4, 5, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 196, 100,, +4, 5, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 196, 100,, +4, 5, 2, 
4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 132,, +4, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 132,, +4, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 132,, +4, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 164,, +4, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 164,, +4, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 164,, +4, 5, 3, 1, 1, 1, 1, 1, 128, 100, 100, 100, 68, 36,, +4, 5, 3, 1, 1, 1, 1, 1, 256, 100, 100, 100, 68, 36,, +4, 5, 3, 1, 1, 1, 1, 1, 384, 100, 100, 100, 68, 36,, +4, 5, 3, 2, 1, 1, 1, 1, 128, 196, 196, 196, 132, 68,, +4, 5, 3, 2, 1, 1, 1, 1, 256, 196, 196, 196, 132, 68,, +4, 5, 3, 2, 1, 1, 1, 1, 384, 196, 196, 196, 132, 68,, +4, 5, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 196, 100,, +4, 5, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 196, 100,, +4, 5, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 196, 100,, +4, 5, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 132,, +4, 5, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 132,, +4, 5, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 132,, +4, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 164,, +4, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 164,, +4, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 164,, +4, 5, 4, 1, 1, 1, 1, 1, 128, 100, 100, 100, 68, 36,, +4, 5, 4, 1, 1, 1, 1, 1, 256, 100, 100, 100, 68, 36,, +4, 5, 4, 1, 1, 1, 1, 1, 384, 100, 100, 100, 68, 36,, +4, 5, 4, 2, 1, 1, 1, 1, 128, 196, 196, 196, 132, 68,, +4, 5, 4, 2, 1, 1, 1, 1, 256, 196, 196, 196, 132, 68,, +4, 5, 4, 2, 1, 1, 1, 1, 384, 196, 196, 196, 132, 68,, +4, 5, 4, 3, 1, 1, 1, 1, 128, 240, 240, 240, 196, 100,, +4, 5, 4, 3, 1, 1, 1, 1, 256, 240, 240, 240, 196, 100,, +4, 5, 4, 3, 1, 1, 1, 1, 384, 240, 240, 240, 196, 100,, +4, 5, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240, 132,, +4, 5, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240, 132,, +4, 5, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240, 132,, +4, 5, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240, 164,, +4, 5, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240, 164,, +4, 5, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240, 164,, +4, 6, 1, 
1, 1, 1, 1, 1, 128, 101, 101, 101, 69,,, +4, 6, 1, 1, 1, 1, 1, 1, 256, 101, 101, 101, 69,,, +4, 6, 1, 1, 1, 1, 1, 1, 384, 101, 101, 101, 69,,, +4, 6, 1, 2, 1, 1, 1, 1, 128, 197, 197, 197, 133,,, +4, 6, 1, 2, 1, 1, 1, 1, 256, 197, 197, 197, 133,,, +4, 6, 1, 2, 1, 1, 1, 1, 384, 197, 197, 197, 133,,, +4, 6, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 197,,, +4, 6, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 197,,, +4, 6, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 197,,, +4, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 2, 1, 1, 1, 1, 1, 128, 101, 101, 101, 69,,, +4, 6, 2, 1, 1, 1, 1, 1, 256, 101, 101, 101, 69,,, +4, 6, 2, 1, 1, 1, 1, 1, 384, 101, 101, 101, 69,,, +4, 6, 2, 2, 1, 1, 1, 1, 128, 197, 197, 197, 133,,, +4, 6, 2, 2, 1, 1, 1, 1, 256, 197, 197, 197, 133,,, +4, 6, 2, 2, 1, 1, 1, 1, 384, 197, 197, 197, 133,,, +4, 6, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 197,,, +4, 6, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 197,,, +4, 6, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 197,,, +4, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 3, 1, 1, 1, 1, 1, 128, 101, 101, 101, 69,,, +4, 6, 3, 1, 1, 1, 1, 1, 256, 101, 101, 101, 69,,, +4, 6, 3, 1, 1, 1, 1, 1, 384, 101, 101, 
101, 69,,, +4, 6, 3, 2, 1, 1, 1, 1, 128, 197, 197, 197, 133,,, +4, 6, 3, 2, 1, 1, 1, 1, 256, 197, 197, 197, 133,,, +4, 6, 3, 2, 1, 1, 1, 1, 384, 197, 197, 197, 133,,, +4, 6, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 197,,, +4, 6, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 197,,, +4, 6, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 197,,, +4, 6, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 4, 1, 1, 1, 1, 1, 128, 101, 101, 101, 69,,, +4, 6, 4, 1, 1, 1, 1, 1, 256, 101, 101, 101, 69,,, +4, 6, 4, 1, 1, 1, 1, 1, 384, 101, 101, 101, 69,,, +4, 6, 4, 2, 1, 1, 1, 1, 128, 197, 197, 197, 133,,, +4, 6, 4, 2, 1, 1, 1, 1, 256, 197, 197, 197, 133,,, +4, 6, 4, 2, 1, 1, 1, 1, 384, 197, 197, 197, 133,,, +4, 6, 4, 3, 1, 1, 1, 1, 128, 240, 240, 240, 197,,, +4, 6, 4, 3, 1, 1, 1, 1, 256, 240, 240, 240, 197,,, +4, 6, 4, 3, 1, 1, 1, 1, 384, 240, 240, 240, 197,,, +4, 6, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 6, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 6, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 6, 4, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 1, 1, 1, 1, 1, 1, 128, 102, 102, 102, 70,,, +4, 7, 1, 1, 1, 1, 1, 1, 256, 102, 102, 102, 70,,, +4, 7, 1, 1, 1, 1, 1, 1, 384, 102, 102, 102, 70,,, +4, 7, 1, 2, 1, 1, 1, 1, 128, 198, 198, 198, 134,,, +4, 7, 1, 2, 1, 1, 1, 1, 256, 198, 198, 198, 134,,, +4, 7, 1, 2, 1, 1, 
1, 1, 384, 198, 198, 198, 134,,, +4, 7, 1, 3, 1, 1, 1, 1, 128, 240, 240, 240, 198,,, +4, 7, 1, 3, 1, 1, 1, 1, 256, 240, 240, 240, 198,,, +4, 7, 1, 3, 1, 1, 1, 1, 384, 240, 240, 240, 198,,, +4, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 2, 1, 1, 1, 1, 1, 128, 102, 102, 102, 70,,, +4, 7, 2, 1, 1, 1, 1, 1, 256, 102, 102, 102, 70,,, +4, 7, 2, 1, 1, 1, 1, 1, 384, 102, 102, 102, 70,,, +4, 7, 2, 2, 1, 1, 1, 1, 128, 198, 198, 198, 134,,, +4, 7, 2, 2, 1, 1, 1, 1, 256, 198, 198, 198, 134,,, +4, 7, 2, 2, 1, 1, 1, 1, 384, 198, 198, 198, 134,,, +4, 7, 2, 3, 1, 1, 1, 1, 128, 240, 240, 240, 198,,, +4, 7, 2, 3, 1, 1, 1, 1, 256, 240, 240, 240, 198,,, +4, 7, 2, 3, 1, 1, 1, 1, 384, 240, 240, 240, 198,,, +4, 7, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 3, 1, 1, 1, 1, 1, 128, 102, 102, 102, 70,,, +4, 7, 3, 1, 1, 1, 1, 1, 256, 102, 102, 102, 
70,,, +4, 7, 3, 1, 1, 1, 1, 1, 384, 102, 102, 102, 70,,, +4, 7, 3, 2, 1, 1, 1, 1, 128, 198, 198, 198, 134,,, +4, 7, 3, 2, 1, 1, 1, 1, 256, 198, 198, 198, 134,,, +4, 7, 3, 2, 1, 1, 1, 1, 384, 198, 198, 198, 134,,, +4, 7, 3, 3, 1, 1, 1, 1, 128, 240, 240, 240, 198,,, +4, 7, 3, 3, 1, 1, 1, 1, 256, 240, 240, 240, 198,,, +4, 7, 3, 3, 1, 1, 1, 1, 384, 240, 240, 240, 198,,, +4, 7, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 4, 1, 1, 1, 1, 1, 128, 102, 102, 102, 70,,, +4, 7, 4, 1, 1, 1, 1, 1, 256, 102, 102, 102, 70,,, +4, 7, 4, 1, 1, 1, 1, 1, 384, 102, 102, 102, 70,,, +4, 7, 4, 2, 1, 1, 1, 1, 128, 198, 198, 198, 134,,, +4, 7, 4, 2, 1, 1, 1, 1, 256, 198, 198, 198, 134,,, +4, 7, 4, 2, 1, 1, 1, 1, 384, 198, 198, 198, 134,,, +4, 7, 4, 3, 1, 1, 1, 1, 128, 240, 240, 240, 198,,, +4, 7, 4, 3, 1, 1, 1, 1, 256, 240, 240, 240, 198,,, +4, 7, 4, 3, 1, 1, 1, 1, 384, 240, 240, 240, 198,,, +4, 7, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +4, 7, 4, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +4, 7, 4, 7, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +4, 7, 4, 7, 1, 1, 1, 
1, 256, 240, 240, 240, 240,,, +4, 7, 4, 7, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 1, 1, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 76, 51, 51 +5, 1, 1, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 76, 51, 51 +5, 1, 1, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 76, 51, 51 +5, 1, 2, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 76, 51, 51 +5, 1, 2, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 76, 51, 51 +5, 1, 2, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 76, 51, 51 +5, 1, 3, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 76, 51, 51 +5, 1, 3, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 76, 51, 51 +5, 1, 3, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 76, 51, 51 +5, 1, 4, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 76, 51, 51 +5, 1, 4, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 76, 51, 51 +5, 1, 4, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 76, 51, 51 +5, 1, 5, 1, 1, 1, 1, 1, 128, 76, 76, 76, 76, 76, 51, 51 +5, 1, 5, 1, 1, 1, 1, 1, 256, 76, 76, 76, 76, 76, 51, 51 +5, 1, 5, 1, 1, 1, 1, 1, 384, 76, 76, 76, 76, 76, 51, 51 +5, 2, 1, 1, 1, 1, 1, 1, 128, 77, 77, 77, 77, 52, 26, +5, 2, 1, 1, 1, 1, 1, 1, 256, 77, 77, 77, 77, 52, 26, +5, 2, 1, 1, 1, 1, 1, 1, 384, 77, 77, 77, 77, 52, 26, +5, 2, 1, 2, 1, 1, 1, 1, 128, 153, 153, 153, 153, 103, 51, +5, 2, 1, 2, 1, 1, 1, 1, 256, 153, 153, 153, 153, 103, 51, +5, 2, 1, 2, 1, 1, 1, 1, 384, 153, 153, 153, 153, 103, 51, +5, 2, 2, 1, 1, 1, 1, 1, 128, 77, 77, 77, 77, 52, 26, +5, 2, 2, 1, 1, 1, 1, 1, 256, 77, 77, 77, 77, 52, 26, +5, 2, 2, 1, 1, 1, 1, 1, 384, 77, 77, 77, 77, 52, 26, +5, 2, 2, 2, 1, 1, 1, 1, 128, 153, 153, 153, 153, 103, 51, +5, 2, 2, 2, 1, 1, 1, 1, 256, 153, 153, 153, 153, 103, 51, +5, 2, 2, 2, 1, 1, 1, 1, 384, 153, 153, 153, 153, 103, 51, +5, 2, 3, 1, 1, 1, 1, 1, 128, 77, 77, 77, 77, 52, 26, +5, 2, 3, 1, 1, 1, 1, 1, 256, 77, 77, 77, 77, 52, 26, +5, 2, 3, 1, 1, 1, 1, 1, 384, 77, 77, 77, 77, 52, 26, +5, 2, 3, 2, 1, 1, 1, 1, 128, 153, 153, 153, 153, 103, 51, +5, 2, 3, 2, 1, 1, 1, 1, 256, 153, 153, 153, 153, 103, 51, +5, 2, 3, 2, 1, 1, 1, 1, 384, 153, 153, 153, 153, 103, 51, +5, 2, 4, 1, 1, 1, 1, 1, 128, 77, 77, 77, 77, 
52, 26, +5, 2, 4, 1, 1, 1, 1, 1, 256, 77, 77, 77, 77, 52, 26, +5, 2, 4, 1, 1, 1, 1, 1, 384, 77, 77, 77, 77, 52, 26, +5, 2, 4, 2, 1, 1, 1, 1, 128, 153, 153, 153, 153, 103, 51, +5, 2, 4, 2, 1, 1, 1, 1, 256, 153, 153, 153, 153, 103, 51, +5, 2, 4, 2, 1, 1, 1, 1, 384, 153, 153, 153, 153, 103, 51, +5, 2, 5, 1, 1, 1, 1, 1, 128, 77, 77, 77, 77, 52, 26, +5, 2, 5, 1, 1, 1, 1, 1, 256, 77, 77, 77, 77, 52, 26, +5, 2, 5, 1, 1, 1, 1, 1, 384, 77, 77, 77, 77, 52, 26, +5, 2, 5, 2, 1, 1, 1, 1, 128, 153, 153, 153, 153, 103, 51, +5, 2, 5, 2, 1, 1, 1, 1, 256, 153, 153, 153, 153, 103, 51, +5, 2, 5, 2, 1, 1, 1, 1, 384, 153, 153, 153, 153, 103, 51, +5, 3, 1, 1, 1, 1, 1, 1, 128, 78, 78, 78, 78, 53,, +5, 3, 1, 1, 1, 1, 1, 1, 256, 78, 78, 78, 78, 53,, +5, 3, 1, 1, 1, 1, 1, 1, 384, 78, 78, 78, 78, 53,, +5, 3, 1, 2, 1, 1, 1, 1, 128, 154, 154, 154, 154, 104,, +5, 3, 1, 2, 1, 1, 1, 1, 256, 154, 154, 154, 154, 104,, +5, 3, 1, 2, 1, 1, 1, 1, 384, 154, 154, 154, 154, 104,, +5, 3, 1, 3, 1, 1, 1, 1, 128, 230, 230, 230, 230, 155,, +5, 3, 1, 3, 1, 1, 1, 1, 256, 230, 230, 230, 230, 155,, +5, 3, 1, 3, 1, 1, 1, 1, 384, 230, 230, 230, 230, 155,, +5, 3, 2, 1, 1, 1, 1, 1, 128, 78, 78, 78, 78, 53,, +5, 3, 2, 1, 1, 1, 1, 1, 256, 78, 78, 78, 78, 53,, +5, 3, 2, 1, 1, 1, 1, 1, 384, 78, 78, 78, 78, 53,, +5, 3, 2, 2, 1, 1, 1, 1, 128, 154, 154, 154, 154, 104,, +5, 3, 2, 2, 1, 1, 1, 1, 256, 154, 154, 154, 154, 104,, +5, 3, 2, 2, 1, 1, 1, 1, 384, 154, 154, 154, 154, 104,, +5, 3, 2, 3, 1, 1, 1, 1, 128, 230, 230, 230, 230, 155,, +5, 3, 2, 3, 1, 1, 1, 1, 256, 230, 230, 230, 230, 155,, +5, 3, 2, 3, 1, 1, 1, 1, 384, 230, 230, 230, 230, 155,, +5, 3, 3, 1, 1, 1, 1, 1, 128, 78, 78, 78, 78, 53,, +5, 3, 3, 1, 1, 1, 1, 1, 256, 78, 78, 78, 78, 53,, +5, 3, 3, 1, 1, 1, 1, 1, 384, 78, 78, 78, 78, 53,, +5, 3, 3, 2, 1, 1, 1, 1, 128, 154, 154, 154, 154, 104,, +5, 3, 3, 2, 1, 1, 1, 1, 256, 154, 154, 154, 154, 104,, +5, 3, 3, 2, 1, 1, 1, 1, 384, 154, 154, 154, 154, 104,, +5, 3, 3, 3, 1, 1, 1, 1, 128, 230, 230, 230, 230, 155,, +5, 3, 3, 3, 
1, 1, 1, 1, 256, 230, 230, 230, 230, 155,, +5, 3, 3, 3, 1, 1, 1, 1, 384, 230, 230, 230, 230, 155,, +5, 3, 4, 1, 1, 1, 1, 1, 128, 78, 78, 78, 78, 53,, +5, 3, 4, 1, 1, 1, 1, 1, 256, 78, 78, 78, 78, 53,, +5, 3, 4, 1, 1, 1, 1, 1, 384, 78, 78, 78, 78, 53,, +5, 3, 4, 2, 1, 1, 1, 1, 128, 154, 154, 154, 154, 104,, +5, 3, 4, 2, 1, 1, 1, 1, 256, 154, 154, 154, 154, 104,, +5, 3, 4, 2, 1, 1, 1, 1, 384, 154, 154, 154, 154, 104,, +5, 3, 4, 3, 1, 1, 1, 1, 128, 230, 230, 230, 230, 155,, +5, 3, 4, 3, 1, 1, 1, 1, 256, 230, 230, 230, 230, 155,, +5, 3, 4, 3, 1, 1, 1, 1, 384, 230, 230, 230, 230, 155,, +5, 3, 5, 1, 1, 1, 1, 1, 128, 78, 78, 78, 78, 53,, +5, 3, 5, 1, 1, 1, 1, 1, 256, 78, 78, 78, 78, 53,, +5, 3, 5, 1, 1, 1, 1, 1, 384, 78, 78, 78, 78, 53,, +5, 3, 5, 2, 1, 1, 1, 1, 128, 154, 154, 154, 154, 104,, +5, 3, 5, 2, 1, 1, 1, 1, 256, 154, 154, 154, 154, 104,, +5, 3, 5, 2, 1, 1, 1, 1, 384, 154, 154, 154, 154, 104,, +5, 3, 5, 3, 1, 1, 1, 1, 128, 230, 230, 230, 230, 155,, +5, 3, 5, 3, 1, 1, 1, 1, 256, 230, 230, 230, 230, 155,, +5, 3, 5, 3, 1, 1, 1, 1, 384, 230, 230, 230, 230, 155,, +5, 4, 1, 1, 1, 1, 1, 1, 128, 79, 79, 79, 54, 28,, +5, 4, 1, 1, 1, 1, 1, 1, 256, 79, 79, 79, 54, 28,, +5, 4, 1, 1, 1, 1, 1, 1, 384, 79, 79, 79, 54, 28,, +5, 4, 1, 2, 1, 1, 1, 1, 128, 155, 155, 155, 105, 53,, +5, 4, 1, 2, 1, 1, 1, 1, 256, 155, 155, 155, 105, 53,, +5, 4, 1, 2, 1, 1, 1, 1, 384, 155, 155, 155, 105, 53,, +5, 4, 1, 3, 1, 1, 1, 1, 128, 231, 231, 231, 156, 78,, +5, 4, 1, 3, 1, 1, 1, 1, 256, 231, 231, 231, 156, 78,, +5, 4, 1, 3, 1, 1, 1, 1, 384, 231, 231, 231, 156, 78,, +5, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 207, 103,, +5, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 207, 103,, +5, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 207, 103,, +5, 4, 2, 1, 1, 1, 1, 1, 128, 79, 79, 79, 54, 28,, +5, 4, 2, 1, 1, 1, 1, 1, 256, 79, 79, 79, 54, 28,, +5, 4, 2, 1, 1, 1, 1, 1, 384, 79, 79, 79, 54, 28,, +5, 4, 2, 2, 1, 1, 1, 1, 128, 155, 155, 155, 105, 53,, +5, 4, 2, 2, 1, 1, 1, 1, 256, 155, 155, 155, 105, 53,, +5, 4, 
2, 2, 1, 1, 1, 1, 384, 155, 155, 155, 105, 53,, +5, 4, 2, 3, 1, 1, 1, 1, 128, 231, 231, 231, 156, 78,, +5, 4, 2, 3, 1, 1, 1, 1, 256, 231, 231, 231, 156, 78,, +5, 4, 2, 3, 1, 1, 1, 1, 384, 231, 231, 231, 156, 78,, +5, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 207, 103,, +5, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 207, 103,, +5, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 207, 103,, +5, 4, 3, 1, 1, 1, 1, 1, 128, 79, 79, 79, 54, 28,, +5, 4, 3, 1, 1, 1, 1, 1, 256, 79, 79, 79, 54, 28,, +5, 4, 3, 1, 1, 1, 1, 1, 384, 79, 79, 79, 54, 28,, +5, 4, 3, 2, 1, 1, 1, 1, 128, 155, 155, 155, 105, 53,, +5, 4, 3, 2, 1, 1, 1, 1, 256, 155, 155, 155, 105, 53,, +5, 4, 3, 2, 1, 1, 1, 1, 384, 155, 155, 155, 105, 53,, +5, 4, 3, 3, 1, 1, 1, 1, 128, 231, 231, 231, 156, 78,, +5, 4, 3, 3, 1, 1, 1, 1, 256, 231, 231, 231, 156, 78,, +5, 4, 3, 3, 1, 1, 1, 1, 384, 231, 231, 231, 156, 78,, +5, 4, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 207, 103,, +5, 4, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 207, 103,, +5, 4, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 207, 103,, +5, 4, 4, 1, 1, 1, 1, 1, 128, 79, 79, 79, 54, 28,, +5, 4, 4, 1, 1, 1, 1, 1, 256, 79, 79, 79, 54, 28,, +5, 4, 4, 1, 1, 1, 1, 1, 384, 79, 79, 79, 54, 28,, +5, 4, 4, 2, 1, 1, 1, 1, 128, 155, 155, 155, 105, 53,, +5, 4, 4, 2, 1, 1, 1, 1, 256, 155, 155, 155, 105, 53,, +5, 4, 4, 2, 1, 1, 1, 1, 384, 155, 155, 155, 105, 53,, +5, 4, 4, 3, 1, 1, 1, 1, 128, 231, 231, 231, 156, 78,, +5, 4, 4, 3, 1, 1, 1, 1, 256, 231, 231, 231, 156, 78,, +5, 4, 4, 3, 1, 1, 1, 1, 384, 231, 231, 231, 156, 78,, +5, 4, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 207, 103,, +5, 4, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 207, 103,, +5, 4, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 207, 103,, +5, 4, 5, 1, 1, 1, 1, 1, 128, 79, 79, 79, 54, 28,, +5, 4, 5, 1, 1, 1, 1, 1, 256, 79, 79, 79, 54, 28,, +5, 4, 5, 1, 1, 1, 1, 1, 384, 79, 79, 79, 54, 28,, +5, 4, 5, 2, 1, 1, 1, 1, 128, 155, 155, 155, 105, 53,, +5, 4, 5, 2, 1, 1, 1, 1, 256, 155, 155, 155, 105, 53,, +5, 4, 5, 2, 1, 1, 1, 1, 384, 155, 155, 155, 105, 
53,, +5, 4, 5, 3, 1, 1, 1, 1, 128, 231, 231, 231, 156, 78,, +5, 4, 5, 3, 1, 1, 1, 1, 256, 231, 231, 231, 156, 78,, +5, 4, 5, 3, 1, 1, 1, 1, 384, 231, 231, 231, 156, 78,, +5, 4, 5, 4, 1, 1, 1, 1, 128, 240, 240, 240, 207, 103,, +5, 4, 5, 4, 1, 1, 1, 1, 256, 240, 240, 240, 207, 103,, +5, 4, 5, 4, 1, 1, 1, 1, 384, 240, 240, 240, 207, 103,, +5, 5, 1, 1, 1, 1, 1, 1, 128, 80, 80, 80, 55,,, +5, 5, 1, 1, 1, 1, 1, 1, 256, 80, 80, 80, 55,,, +5, 5, 1, 1, 1, 1, 1, 1, 384, 80, 80, 80, 55,,, +5, 5, 1, 2, 1, 1, 1, 1, 128, 156, 156, 156, 106,,, +5, 5, 1, 2, 1, 1, 1, 1, 256, 156, 156, 156, 106,,, +5, 5, 1, 2, 1, 1, 1, 1, 384, 156, 156, 156, 106,,, +5, 5, 1, 3, 1, 1, 1, 1, 128, 232, 232, 232, 157,,, +5, 5, 1, 3, 1, 1, 1, 1, 256, 232, 232, 232, 157,,, +5, 5, 1, 3, 1, 1, 1, 1, 384, 232, 232, 232, 157,,, +5, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 208,,, +5, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 208,,, +5, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 208,,, +5, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 5, 2, 1, 1, 1, 1, 1, 128, 80, 80, 80, 55,,, +5, 5, 2, 1, 1, 1, 1, 1, 256, 80, 80, 80, 55,,, +5, 5, 2, 1, 1, 1, 1, 1, 384, 80, 80, 80, 55,,, +5, 5, 2, 2, 1, 1, 1, 1, 128, 156, 156, 156, 106,,, +5, 5, 2, 2, 1, 1, 1, 1, 256, 156, 156, 156, 106,,, +5, 5, 2, 2, 1, 1, 1, 1, 384, 156, 156, 156, 106,,, +5, 5, 2, 3, 1, 1, 1, 1, 128, 232, 232, 232, 157,,, +5, 5, 2, 3, 1, 1, 1, 1, 256, 232, 232, 232, 157,,, +5, 5, 2, 3, 1, 1, 1, 1, 384, 232, 232, 232, 157,,, +5, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 208,,, +5, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 208,,, +5, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 208,,, +5, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 5, 3, 1, 1, 1, 1, 1, 128, 80, 80, 80, 55,,, +5, 5, 3, 1, 1, 1, 1, 1, 256, 80, 80, 80, 55,,, +5, 5, 3, 1, 1, 1, 1, 1, 384, 
80, 80, 80, 55,,, +5, 5, 3, 2, 1, 1, 1, 1, 128, 156, 156, 156, 106,,, +5, 5, 3, 2, 1, 1, 1, 1, 256, 156, 156, 156, 106,,, +5, 5, 3, 2, 1, 1, 1, 1, 384, 156, 156, 156, 106,,, +5, 5, 3, 3, 1, 1, 1, 1, 128, 232, 232, 232, 157,,, +5, 5, 3, 3, 1, 1, 1, 1, 256, 232, 232, 232, 157,,, +5, 5, 3, 3, 1, 1, 1, 1, 384, 232, 232, 232, 157,,, +5, 5, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 208,,, +5, 5, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 208,,, +5, 5, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 208,,, +5, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 5, 4, 1, 1, 1, 1, 1, 128, 80, 80, 80, 55,,, +5, 5, 4, 1, 1, 1, 1, 1, 256, 80, 80, 80, 55,,, +5, 5, 4, 1, 1, 1, 1, 1, 384, 80, 80, 80, 55,,, +5, 5, 4, 2, 1, 1, 1, 1, 128, 156, 156, 156, 106,,, +5, 5, 4, 2, 1, 1, 1, 1, 256, 156, 156, 156, 106,,, +5, 5, 4, 2, 1, 1, 1, 1, 384, 156, 156, 156, 106,,, +5, 5, 4, 3, 1, 1, 1, 1, 128, 232, 232, 232, 157,,, +5, 5, 4, 3, 1, 1, 1, 1, 256, 232, 232, 232, 157,,, +5, 5, 4, 3, 1, 1, 1, 1, 384, 232, 232, 232, 157,,, +5, 5, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 208,,, +5, 5, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 208,,, +5, 5, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 208,,, +5, 5, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 5, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 5, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 5, 5, 1, 1, 1, 1, 1, 128, 80, 80, 80, 55,,, +5, 5, 5, 1, 1, 1, 1, 1, 256, 80, 80, 80, 55,,, +5, 5, 5, 1, 1, 1, 1, 1, 384, 80, 80, 80, 55,,, +5, 5, 5, 2, 1, 1, 1, 1, 128, 156, 156, 156, 106,,, +5, 5, 5, 2, 1, 1, 1, 1, 256, 156, 156, 156, 106,,, +5, 5, 5, 2, 1, 1, 1, 1, 384, 156, 156, 156, 106,,, +5, 5, 5, 3, 1, 1, 1, 1, 128, 232, 232, 232, 157,,, +5, 5, 5, 3, 1, 1, 1, 1, 256, 232, 232, 232, 157,,, +5, 5, 5, 3, 1, 1, 1, 1, 384, 232, 232, 232, 157,,, +5, 5, 5, 4, 1, 1, 1, 1, 128, 240, 240, 240, 208,,, +5, 5, 5, 4, 1, 1, 1, 1, 256, 240, 240, 240, 208,,, +5, 5, 5, 4, 1, 1, 1, 1, 384, 
240, 240, 240, 208,,, +5, 5, 5, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 5, 5, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 5, 5, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 1, 1, 1, 1, 1, 1, 128, 81, 81, 81, 56,,, +5, 6, 1, 1, 1, 1, 1, 1, 256, 81, 81, 81, 56,,, +5, 6, 1, 1, 1, 1, 1, 1, 384, 81, 81, 81, 56,,, +5, 6, 1, 2, 1, 1, 1, 1, 128, 157, 157, 157, 107,,, +5, 6, 1, 2, 1, 1, 1, 1, 256, 157, 157, 157, 107,,, +5, 6, 1, 2, 1, 1, 1, 1, 384, 157, 157, 157, 107,,, +5, 6, 1, 3, 1, 1, 1, 1, 128, 233, 233, 233, 158,,, +5, 6, 1, 3, 1, 1, 1, 1, 256, 233, 233, 233, 158,,, +5, 6, 1, 3, 1, 1, 1, 1, 384, 233, 233, 233, 158,,, +5, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 209,,, +5, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 209,,, +5, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 209,,, +5, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 2, 1, 1, 1, 1, 1, 128, 81, 81, 81, 56,,, +5, 6, 2, 1, 1, 1, 1, 1, 256, 81, 81, 81, 56,,, +5, 6, 2, 1, 1, 1, 1, 1, 384, 81, 81, 81, 56,,, +5, 6, 2, 2, 1, 1, 1, 1, 128, 157, 157, 157, 107,,, +5, 6, 2, 2, 1, 1, 1, 1, 256, 157, 157, 157, 107,,, +5, 6, 2, 2, 1, 1, 1, 1, 384, 157, 157, 157, 107,,, +5, 6, 2, 3, 1, 1, 1, 1, 128, 233, 233, 233, 158,,, +5, 6, 2, 3, 1, 1, 1, 1, 256, 233, 233, 233, 158,,, +5, 6, 2, 3, 1, 1, 1, 1, 384, 233, 233, 233, 158,,, +5, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 209,,, +5, 6, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 209,,, +5, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 209,,, +5, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 2, 6, 1, 1, 1, 1, 
384, 240, 240, 240, 240,,, +5, 6, 3, 1, 1, 1, 1, 1, 128, 81, 81, 81, 56,,, +5, 6, 3, 1, 1, 1, 1, 1, 256, 81, 81, 81, 56,,, +5, 6, 3, 1, 1, 1, 1, 1, 384, 81, 81, 81, 56,,, +5, 6, 3, 2, 1, 1, 1, 1, 128, 157, 157, 157, 107,,, +5, 6, 3, 2, 1, 1, 1, 1, 256, 157, 157, 157, 107,,, +5, 6, 3, 2, 1, 1, 1, 1, 384, 157, 157, 157, 107,,, +5, 6, 3, 3, 1, 1, 1, 1, 128, 233, 233, 233, 158,,, +5, 6, 3, 3, 1, 1, 1, 1, 256, 233, 233, 233, 158,,, +5, 6, 3, 3, 1, 1, 1, 1, 384, 233, 233, 233, 158,,, +5, 6, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 209,,, +5, 6, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 209,,, +5, 6, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 209,,, +5, 6, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 4, 1, 1, 1, 1, 1, 128, 81, 81, 81, 56,,, +5, 6, 4, 1, 1, 1, 1, 1, 256, 81, 81, 81, 56,,, +5, 6, 4, 1, 1, 1, 1, 1, 384, 81, 81, 81, 56,,, +5, 6, 4, 2, 1, 1, 1, 1, 128, 157, 157, 157, 107,,, +5, 6, 4, 2, 1, 1, 1, 1, 256, 157, 157, 157, 107,,, +5, 6, 4, 2, 1, 1, 1, 1, 384, 157, 157, 157, 107,,, +5, 6, 4, 3, 1, 1, 1, 1, 128, 233, 233, 233, 158,,, +5, 6, 4, 3, 1, 1, 1, 1, 256, 233, 233, 233, 158,,, +5, 6, 4, 3, 1, 1, 1, 1, 384, 233, 233, 233, 158,,, +5, 6, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 209,,, +5, 6, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 209,,, +5, 6, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 209,,, +5, 6, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 4, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 5, 1, 1, 1, 1, 1, 128, 81, 81, 81, 56,,, +5, 6, 5, 1, 1, 1, 1, 1, 256, 81, 81, 81, 56,,, +5, 6, 5, 1, 1, 1, 1, 1, 
384, 81, 81, 81, 56,,, +5, 6, 5, 2, 1, 1, 1, 1, 128, 157, 157, 157, 107,,, +5, 6, 5, 2, 1, 1, 1, 1, 256, 157, 157, 157, 107,,, +5, 6, 5, 2, 1, 1, 1, 1, 384, 157, 157, 157, 107,,, +5, 6, 5, 3, 1, 1, 1, 1, 128, 233, 233, 233, 158,,, +5, 6, 5, 3, 1, 1, 1, 1, 256, 233, 233, 233, 158,,, +5, 6, 5, 3, 1, 1, 1, 1, 384, 233, 233, 233, 158,,, +5, 6, 5, 4, 1, 1, 1, 1, 128, 240, 240, 240, 209,,, +5, 6, 5, 4, 1, 1, 1, 1, 256, 240, 240, 240, 209,,, +5, 6, 5, 4, 1, 1, 1, 1, 384, 240, 240, 240, 209,,, +5, 6, 5, 5, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 5, 5, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 5, 5, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 6, 5, 6, 1, 1, 1, 1, 128, 240, 240, 240, 240,,, +5, 6, 5, 6, 1, 1, 1, 1, 256, 240, 240, 240, 240,,, +5, 6, 5, 6, 1, 1, 1, 1, 384, 240, 240, 240, 240,,, +5, 7, 1, 1, 1, 1, 1, 1, 128, 82, 82, 57, 31,,, +5, 7, 1, 1, 1, 1, 1, 1, 256, 82, 82, 57, 31,,, +5, 7, 1, 1, 1, 1, 1, 1, 384, 82, 82, 57, 31,,, +5, 7, 1, 2, 1, 1, 1, 1, 128, 158, 158, 108, 56,,, +5, 7, 1, 2, 1, 1, 1, 1, 256, 158, 158, 108, 56,,, +5, 7, 1, 2, 1, 1, 1, 1, 384, 158, 158, 108, 56,,, +5, 7, 1, 3, 1, 1, 1, 1, 128, 234, 234, 159, 81,,, +5, 7, 1, 3, 1, 1, 1, 1, 256, 234, 234, 159, 81,,, +5, 7, 1, 3, 1, 1, 1, 1, 384, 234, 234, 159, 81,,, +5, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 210, 106,,, +5, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 210, 106,,, +5, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 210, 106,,, +5, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +5, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +5, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +5, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 156,,, +5, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 156,,, +5, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 156,,, +5, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240, 181,,, +5, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240, 181,,, +5, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240, 181,,, +5, 7, 2, 1, 1, 1, 1, 1, 128, 82, 82, 57, 31,,, +5, 7, 2, 1, 1, 1, 1, 1, 256, 82, 82, 57, 31,,, +5, 7, 2, 1, 1, 1, 1, 1, 
384, 82, 82, 57, 31,,, +5, 7, 2, 2, 1, 1, 1, 1, 128, 158, 158, 108, 56,,, +5, 7, 2, 2, 1, 1, 1, 1, 256, 158, 158, 108, 56,,, +5, 7, 2, 2, 1, 1, 1, 1, 384, 158, 158, 108, 56,,, +5, 7, 2, 3, 1, 1, 1, 1, 128, 234, 234, 159, 81,,, +5, 7, 2, 3, 1, 1, 1, 1, 256, 234, 234, 159, 81,,, +5, 7, 2, 3, 1, 1, 1, 1, 384, 234, 234, 159, 81,,, +5, 7, 2, 4, 1, 1, 1, 1, 128, 240, 240, 210, 106,,, +5, 7, 2, 4, 1, 1, 1, 1, 256, 240, 240, 210, 106,,, +5, 7, 2, 4, 1, 1, 1, 1, 384, 240, 240, 210, 106,,, +5, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +5, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +5, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +5, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 156,,, +5, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 156,,, +5, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240, 156,,, +5, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 240, 181,,, +5, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240, 181,,, +5, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240, 181,,, +5, 7, 3, 1, 1, 1, 1, 1, 128, 82, 82, 57, 31,,, +5, 7, 3, 1, 1, 1, 1, 1, 256, 82, 82, 57, 31,,, +5, 7, 3, 1, 1, 1, 1, 1, 384, 82, 82, 57, 31,,, +5, 7, 3, 2, 1, 1, 1, 1, 128, 158, 158, 108, 56,,, +5, 7, 3, 2, 1, 1, 1, 1, 256, 158, 158, 108, 56,,, +5, 7, 3, 2, 1, 1, 1, 1, 384, 158, 158, 108, 56,,, +5, 7, 3, 3, 1, 1, 1, 1, 128, 234, 234, 159, 81,,, +5, 7, 3, 3, 1, 1, 1, 1, 256, 234, 234, 159, 81,,, +5, 7, 3, 3, 1, 1, 1, 1, 384, 234, 234, 159, 81,,, +5, 7, 3, 4, 1, 1, 1, 1, 128, 240, 240, 210, 106,,, +5, 7, 3, 4, 1, 1, 1, 1, 256, 240, 240, 210, 106,,, +5, 7, 3, 4, 1, 1, 1, 1, 384, 240, 240, 210, 106,,, +5, 7, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +5, 7, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +5, 7, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +5, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240, 156,,, +5, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240, 156,,, +5, 7, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240, 156,,, +5, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 240, 181,,, +5, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 240, 181,,, +5, 7, 3, 7, 1, 1, 1, 1, 
384, 240, 240, 240, 181,,, +5, 7, 4, 1, 1, 1, 1, 1, 128, 82, 82, 57, 31,,, +5, 7, 4, 1, 1, 1, 1, 1, 256, 82, 82, 57, 31,,, +5, 7, 4, 1, 1, 1, 1, 1, 384, 82, 82, 57, 31,,, +5, 7, 4, 2, 1, 1, 1, 1, 128, 158, 158, 108, 56,,, +5, 7, 4, 2, 1, 1, 1, 1, 256, 158, 158, 108, 56,,, +5, 7, 4, 2, 1, 1, 1, 1, 384, 158, 158, 108, 56,,, +5, 7, 4, 3, 1, 1, 1, 1, 128, 234, 234, 159, 81,,, +5, 7, 4, 3, 1, 1, 1, 1, 256, 234, 234, 159, 81,,, +5, 7, 4, 3, 1, 1, 1, 1, 384, 234, 234, 159, 81,,, +5, 7, 4, 4, 1, 1, 1, 1, 128, 240, 240, 210, 106,,, +5, 7, 4, 4, 1, 1, 1, 1, 256, 240, 240, 210, 106,,, +5, 7, 4, 4, 1, 1, 1, 1, 384, 240, 240, 210, 106,,, +5, 7, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +5, 7, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +5, 7, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +5, 7, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240, 156,,, +5, 7, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240, 156,,, +5, 7, 4, 6, 1, 1, 1, 1, 384, 240, 240, 240, 156,,, +5, 7, 4, 7, 1, 1, 1, 1, 128, 240, 240, 240, 181,,, +5, 7, 4, 7, 1, 1, 1, 1, 256, 240, 240, 240, 181,,, +5, 7, 4, 7, 1, 1, 1, 1, 384, 240, 240, 240, 181,,, +5, 7, 5, 1, 1, 1, 1, 1, 128, 82, 82, 57, 31,,, +5, 7, 5, 1, 1, 1, 1, 1, 256, 82, 82, 57, 31,,, +5, 7, 5, 1, 1, 1, 1, 1, 384, 82, 82, 57, 31,,, +5, 7, 5, 2, 1, 1, 1, 1, 128, 158, 158, 108, 56,,, +5, 7, 5, 2, 1, 1, 1, 1, 256, 158, 158, 108, 56,,, +5, 7, 5, 2, 1, 1, 1, 1, 384, 158, 158, 108, 56,,, +5, 7, 5, 3, 1, 1, 1, 1, 128, 234, 234, 159, 81,,, +5, 7, 5, 3, 1, 1, 1, 1, 256, 234, 234, 159, 81,,, +5, 7, 5, 3, 1, 1, 1, 1, 384, 234, 234, 159, 81,,, +5, 7, 5, 4, 1, 1, 1, 1, 128, 240, 240, 210, 106,,, +5, 7, 5, 4, 1, 1, 1, 1, 256, 240, 240, 210, 106,,, +5, 7, 5, 4, 1, 1, 1, 1, 384, 240, 240, 210, 106,,, +5, 7, 5, 5, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +5, 7, 5, 5, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +5, 7, 5, 5, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +5, 7, 5, 6, 1, 1, 1, 1, 128, 240, 240, 240, 156,,, +5, 7, 5, 6, 1, 1, 1, 1, 256, 240, 240, 240, 156,,, +5, 7, 5, 6, 1, 1, 1, 1, 384, 
240, 240, 240, 156,,, +5, 7, 5, 7, 1, 1, 1, 1, 128, 240, 240, 240, 181,,, +5, 7, 5, 7, 1, 1, 1, 1, 256, 240, 240, 240, 181,,, +5, 7, 5, 7, 1, 1, 1, 1, 384, 240, 240, 240, 181,,, +6, 1, 1, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 64, 42, 21 +6, 1, 1, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 64, 42, 21 +6, 1, 1, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 64, 42, 21 +6, 1, 2, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 64, 42, 21 +6, 1, 2, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 64, 42, 21 +6, 1, 2, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 64, 42, 21 +6, 1, 3, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 64, 42, 21 +6, 1, 3, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 64, 42, 21 +6, 1, 3, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 64, 42, 21 +6, 1, 4, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 64, 42, 21 +6, 1, 4, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 64, 42, 21 +6, 1, 4, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 64, 42, 21 +6, 1, 5, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 64, 42, 21 +6, 1, 5, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 64, 42, 21 +6, 1, 5, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 64, 42, 21 +6, 1, 6, 1, 1, 1, 1, 1, 128, 64, 64, 64, 64, 64, 42, 21 +6, 1, 6, 1, 1, 1, 1, 1, 256, 64, 64, 64, 64, 64, 42, 21 +6, 1, 6, 1, 1, 1, 1, 1, 384, 64, 64, 64, 64, 64, 42, 21 +6, 2, 1, 1, 1, 1, 1, 1, 128, 65, 65, 65, 65, 43, 22, +6, 2, 1, 1, 1, 1, 1, 1, 256, 65, 65, 65, 65, 43, 22, +6, 2, 1, 1, 1, 1, 1, 1, 384, 65, 65, 65, 65, 43, 22, +6, 2, 1, 2, 1, 1, 1, 1, 128, 129, 129, 129, 129, 85, 43, +6, 2, 1, 2, 1, 1, 1, 1, 256, 129, 129, 129, 129, 85, 43, +6, 2, 1, 2, 1, 1, 1, 1, 384, 129, 129, 129, 129, 85, 43, +6, 2, 2, 1, 1, 1, 1, 1, 128, 65, 65, 65, 65, 43, 22, +6, 2, 2, 1, 1, 1, 1, 1, 256, 65, 65, 65, 65, 43, 22, +6, 2, 2, 1, 1, 1, 1, 1, 384, 65, 65, 65, 65, 43, 22, +6, 2, 2, 2, 1, 1, 1, 1, 128, 129, 129, 129, 129, 85, 43, +6, 2, 2, 2, 1, 1, 1, 1, 256, 129, 129, 129, 129, 85, 43, +6, 2, 2, 2, 1, 1, 1, 1, 384, 129, 129, 129, 129, 85, 43, +6, 2, 3, 1, 1, 1, 1, 1, 128, 65, 65, 65, 65, 43, 22, +6, 2, 3, 1, 1, 1, 1, 1, 256, 65, 65, 65, 65, 43, 22, +6, 2, 3, 1, 1, 
1, 1, 1, 384, 65, 65, 65, 65, 43, 22, +6, 2, 3, 2, 1, 1, 1, 1, 128, 129, 129, 129, 129, 85, 43, +6, 2, 3, 2, 1, 1, 1, 1, 256, 129, 129, 129, 129, 85, 43, +6, 2, 3, 2, 1, 1, 1, 1, 384, 129, 129, 129, 129, 85, 43, +6, 2, 4, 1, 1, 1, 1, 1, 128, 65, 65, 65, 65, 43, 22, +6, 2, 4, 1, 1, 1, 1, 1, 256, 65, 65, 65, 65, 43, 22, +6, 2, 4, 1, 1, 1, 1, 1, 384, 65, 65, 65, 65, 43, 22, +6, 2, 4, 2, 1, 1, 1, 1, 128, 129, 129, 129, 129, 85, 43, +6, 2, 4, 2, 1, 1, 1, 1, 256, 129, 129, 129, 129, 85, 43, +6, 2, 4, 2, 1, 1, 1, 1, 384, 129, 129, 129, 129, 85, 43, +6, 2, 5, 1, 1, 1, 1, 1, 128, 65, 65, 65, 65, 43, 22, +6, 2, 5, 1, 1, 1, 1, 1, 256, 65, 65, 65, 65, 43, 22, +6, 2, 5, 1, 1, 1, 1, 1, 384, 65, 65, 65, 65, 43, 22, +6, 2, 5, 2, 1, 1, 1, 1, 128, 129, 129, 129, 129, 85, 43, +6, 2, 5, 2, 1, 1, 1, 1, 256, 129, 129, 129, 129, 85, 43, +6, 2, 5, 2, 1, 1, 1, 1, 384, 129, 129, 129, 129, 85, 43, +6, 2, 6, 1, 1, 1, 1, 1, 128, 65, 65, 65, 65, 43, 22, +6, 2, 6, 1, 1, 1, 1, 1, 256, 65, 65, 65, 65, 43, 22, +6, 2, 6, 1, 1, 1, 1, 1, 384, 65, 65, 65, 65, 43, 22, +6, 2, 6, 2, 1, 1, 1, 1, 128, 129, 129, 129, 129, 85, 43, +6, 2, 6, 2, 1, 1, 1, 1, 256, 129, 129, 129, 129, 85, 43, +6, 2, 6, 2, 1, 1, 1, 1, 384, 129, 129, 129, 129, 85, 43, +6, 3, 1, 1, 1, 1, 1, 1, 128, 66, 66, 66, 44, 23,, +6, 3, 1, 1, 1, 1, 1, 1, 256, 66, 66, 66, 44, 23,, +6, 3, 1, 1, 1, 1, 1, 1, 384, 66, 66, 66, 44, 23,, +6, 3, 1, 2, 1, 1, 1, 1, 128, 130, 130, 130, 86, 44,, +6, 3, 1, 2, 1, 1, 1, 1, 256, 130, 130, 130, 86, 44,, +6, 3, 1, 2, 1, 1, 1, 1, 384, 130, 130, 130, 86, 44,, +6, 3, 1, 3, 1, 1, 1, 1, 128, 194, 194, 194, 128, 65,, +6, 3, 1, 3, 1, 1, 1, 1, 256, 194, 194, 194, 128, 65,, +6, 3, 1, 3, 1, 1, 1, 1, 384, 194, 194, 194, 128, 65,, +6, 3, 2, 1, 1, 1, 1, 1, 128, 66, 66, 66, 44, 23,, +6, 3, 2, 1, 1, 1, 1, 1, 256, 66, 66, 66, 44, 23,, +6, 3, 2, 1, 1, 1, 1, 1, 384, 66, 66, 66, 44, 23,, +6, 3, 2, 2, 1, 1, 1, 1, 128, 130, 130, 130, 86, 44,, +6, 3, 2, 2, 1, 1, 1, 1, 256, 130, 130, 130, 86, 44,, +6, 3, 2, 2, 1, 1, 1, 1, 384, 130, 
130, 130, 86, 44,, +6, 3, 2, 3, 1, 1, 1, 1, 128, 194, 194, 194, 128, 65,, +6, 3, 2, 3, 1, 1, 1, 1, 256, 194, 194, 194, 128, 65,, +6, 3, 2, 3, 1, 1, 1, 1, 384, 194, 194, 194, 128, 65,, +6, 3, 3, 1, 1, 1, 1, 1, 128, 66, 66, 66, 44, 23,, +6, 3, 3, 1, 1, 1, 1, 1, 256, 66, 66, 66, 44, 23,, +6, 3, 3, 1, 1, 1, 1, 1, 384, 66, 66, 66, 44, 23,, +6, 3, 3, 2, 1, 1, 1, 1, 128, 130, 130, 130, 86, 44,, +6, 3, 3, 2, 1, 1, 1, 1, 256, 130, 130, 130, 86, 44,, +6, 3, 3, 2, 1, 1, 1, 1, 384, 130, 130, 130, 86, 44,, +6, 3, 3, 3, 1, 1, 1, 1, 128, 194, 194, 194, 128, 65,, +6, 3, 3, 3, 1, 1, 1, 1, 256, 194, 194, 194, 128, 65,, +6, 3, 3, 3, 1, 1, 1, 1, 384, 194, 194, 194, 128, 65,, +6, 3, 4, 1, 1, 1, 1, 1, 128, 66, 66, 66, 44, 23,, +6, 3, 4, 1, 1, 1, 1, 1, 256, 66, 66, 66, 44, 23,, +6, 3, 4, 1, 1, 1, 1, 1, 384, 66, 66, 66, 44, 23,, +6, 3, 4, 2, 1, 1, 1, 1, 128, 130, 130, 130, 86, 44,, +6, 3, 4, 2, 1, 1, 1, 1, 256, 130, 130, 130, 86, 44,, +6, 3, 4, 2, 1, 1, 1, 1, 384, 130, 130, 130, 86, 44,, +6, 3, 4, 3, 1, 1, 1, 1, 128, 194, 194, 194, 128, 65,, +6, 3, 4, 3, 1, 1, 1, 1, 256, 194, 194, 194, 128, 65,, +6, 3, 4, 3, 1, 1, 1, 1, 384, 194, 194, 194, 128, 65,, +6, 3, 5, 1, 1, 1, 1, 1, 128, 66, 66, 66, 44, 23,, +6, 3, 5, 1, 1, 1, 1, 1, 256, 66, 66, 66, 44, 23,, +6, 3, 5, 1, 1, 1, 1, 1, 384, 66, 66, 66, 44, 23,, +6, 3, 5, 2, 1, 1, 1, 1, 128, 130, 130, 130, 86, 44,, +6, 3, 5, 2, 1, 1, 1, 1, 256, 130, 130, 130, 86, 44,, +6, 3, 5, 2, 1, 1, 1, 1, 384, 130, 130, 130, 86, 44,, +6, 3, 5, 3, 1, 1, 1, 1, 128, 194, 194, 194, 128, 65,, +6, 3, 5, 3, 1, 1, 1, 1, 256, 194, 194, 194, 128, 65,, +6, 3, 5, 3, 1, 1, 1, 1, 384, 194, 194, 194, 128, 65,, +6, 3, 6, 1, 1, 1, 1, 1, 128, 66, 66, 66, 44, 23,, +6, 3, 6, 1, 1, 1, 1, 1, 256, 66, 66, 66, 44, 23,, +6, 3, 6, 1, 1, 1, 1, 1, 384, 66, 66, 66, 44, 23,, +6, 3, 6, 2, 1, 1, 1, 1, 128, 130, 130, 130, 86, 44,, +6, 3, 6, 2, 1, 1, 1, 1, 256, 130, 130, 130, 86, 44,, +6, 3, 6, 2, 1, 1, 1, 1, 384, 130, 130, 130, 86, 44,, +6, 3, 6, 3, 1, 1, 1, 1, 128, 194, 194, 194, 128, 65,, +6, 
3, 6, 3, 1, 1, 1, 1, 256, 194, 194, 194, 128, 65,, +6, 3, 6, 3, 1, 1, 1, 1, 384, 194, 194, 194, 128, 65,, +6, 4, 1, 1, 1, 1, 1, 1, 128, 67, 67, 67, 45, 24,, +6, 4, 1, 1, 1, 1, 1, 1, 256, 67, 67, 67, 45, 24,, +6, 4, 1, 1, 1, 1, 1, 1, 384, 67, 67, 67, 45, 24,, +6, 4, 1, 2, 1, 1, 1, 1, 128, 131, 131, 131, 87, 45,, +6, 4, 1, 2, 1, 1, 1, 1, 256, 131, 131, 131, 87, 45,, +6, 4, 1, 2, 1, 1, 1, 1, 384, 131, 131, 131, 87, 45,, +6, 4, 1, 3, 1, 1, 1, 1, 128, 195, 195, 195, 129, 66,, +6, 4, 1, 3, 1, 1, 1, 1, 256, 195, 195, 195, 129, 66,, +6, 4, 1, 3, 1, 1, 1, 1, 384, 195, 195, 195, 129, 66,, +6, 4, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 171, 87,, +6, 4, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 171, 87,, +6, 4, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 171, 87,, +6, 4, 2, 1, 1, 1, 1, 1, 128, 67, 67, 67, 45, 24,, +6, 4, 2, 1, 1, 1, 1, 1, 256, 67, 67, 67, 45, 24,, +6, 4, 2, 1, 1, 1, 1, 1, 384, 67, 67, 67, 45, 24,, +6, 4, 2, 2, 1, 1, 1, 1, 128, 131, 131, 131, 87, 45,, +6, 4, 2, 2, 1, 1, 1, 1, 256, 131, 131, 131, 87, 45,, +6, 4, 2, 2, 1, 1, 1, 1, 384, 131, 131, 131, 87, 45,, +6, 4, 2, 3, 1, 1, 1, 1, 128, 195, 195, 195, 129, 66,, +6, 4, 2, 3, 1, 1, 1, 1, 256, 195, 195, 195, 129, 66,, +6, 4, 2, 3, 1, 1, 1, 1, 384, 195, 195, 195, 129, 66,, +6, 4, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 171, 87,, +6, 4, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 171, 87,, +6, 4, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 171, 87,, +6, 4, 3, 1, 1, 1, 1, 1, 128, 67, 67, 67, 45, 24,, +6, 4, 3, 1, 1, 1, 1, 1, 256, 67, 67, 67, 45, 24,, +6, 4, 3, 1, 1, 1, 1, 1, 384, 67, 67, 67, 45, 24,, +6, 4, 3, 2, 1, 1, 1, 1, 128, 131, 131, 131, 87, 45,, +6, 4, 3, 2, 1, 1, 1, 1, 256, 131, 131, 131, 87, 45,, +6, 4, 3, 2, 1, 1, 1, 1, 384, 131, 131, 131, 87, 45,, +6, 4, 3, 3, 1, 1, 1, 1, 128, 195, 195, 195, 129, 66,, +6, 4, 3, 3, 1, 1, 1, 1, 256, 195, 195, 195, 129, 66,, +6, 4, 3, 3, 1, 1, 1, 1, 384, 195, 195, 195, 129, 66,, +6, 4, 3, 4, 1, 1, 1, 1, 128, 240, 240, 240, 171, 87,, +6, 4, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 171, 87,, +6, 4, 3, 4, 
1, 1, 1, 1, 384, 240, 240, 240, 171, 87,, +6, 4, 4, 1, 1, 1, 1, 1, 128, 67, 67, 67, 45, 24,, +6, 4, 4, 1, 1, 1, 1, 1, 256, 67, 67, 67, 45, 24,, +6, 4, 4, 1, 1, 1, 1, 1, 384, 67, 67, 67, 45, 24,, +6, 4, 4, 2, 1, 1, 1, 1, 128, 131, 131, 131, 87, 45,, +6, 4, 4, 2, 1, 1, 1, 1, 256, 131, 131, 131, 87, 45,, +6, 4, 4, 2, 1, 1, 1, 1, 384, 131, 131, 131, 87, 45,, +6, 4, 4, 3, 1, 1, 1, 1, 128, 195, 195, 195, 129, 66,, +6, 4, 4, 3, 1, 1, 1, 1, 256, 195, 195, 195, 129, 66,, +6, 4, 4, 3, 1, 1, 1, 1, 384, 195, 195, 195, 129, 66,, +6, 4, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 171, 87,, +6, 4, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 171, 87,, +6, 4, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 171, 87,, +6, 4, 5, 1, 1, 1, 1, 1, 128, 67, 67, 67, 45, 24,, +6, 4, 5, 1, 1, 1, 1, 1, 256, 67, 67, 67, 45, 24,, +6, 4, 5, 1, 1, 1, 1, 1, 384, 67, 67, 67, 45, 24,, +6, 4, 5, 2, 1, 1, 1, 1, 128, 131, 131, 131, 87, 45,, +6, 4, 5, 2, 1, 1, 1, 1, 256, 131, 131, 131, 87, 45,, +6, 4, 5, 2, 1, 1, 1, 1, 384, 131, 131, 131, 87, 45,, +6, 4, 5, 3, 1, 1, 1, 1, 128, 195, 195, 195, 129, 66,, +6, 4, 5, 3, 1, 1, 1, 1, 256, 195, 195, 195, 129, 66,, +6, 4, 5, 3, 1, 1, 1, 1, 384, 195, 195, 195, 129, 66,, +6, 4, 5, 4, 1, 1, 1, 1, 128, 240, 240, 240, 171, 87,, +6, 4, 5, 4, 1, 1, 1, 1, 256, 240, 240, 240, 171, 87,, +6, 4, 5, 4, 1, 1, 1, 1, 384, 240, 240, 240, 171, 87,, +6, 4, 6, 1, 1, 1, 1, 1, 128, 67, 67, 67, 45, 24,, +6, 4, 6, 1, 1, 1, 1, 1, 256, 67, 67, 67, 45, 24,, +6, 4, 6, 1, 1, 1, 1, 1, 384, 67, 67, 67, 45, 24,, +6, 4, 6, 2, 1, 1, 1, 1, 128, 131, 131, 131, 87, 45,, +6, 4, 6, 2, 1, 1, 1, 1, 256, 131, 131, 131, 87, 45,, +6, 4, 6, 2, 1, 1, 1, 1, 384, 131, 131, 131, 87, 45,, +6, 4, 6, 3, 1, 1, 1, 1, 128, 195, 195, 195, 129, 66,, +6, 4, 6, 3, 1, 1, 1, 1, 256, 195, 195, 195, 129, 66,, +6, 4, 6, 3, 1, 1, 1, 1, 384, 195, 195, 195, 129, 66,, +6, 4, 6, 4, 1, 1, 1, 1, 128, 240, 240, 240, 171, 87,, +6, 4, 6, 4, 1, 1, 1, 1, 256, 240, 240, 240, 171, 87,, +6, 4, 6, 4, 1, 1, 1, 1, 384, 240, 240, 240, 171, 87,, +6, 5, 1, 1, 1, 1, 1, 
1, 128, 68, 68, 68, 46,,, +6, 5, 1, 1, 1, 1, 1, 1, 256, 68, 68, 68, 46,,, +6, 5, 1, 1, 1, 1, 1, 1, 384, 68, 68, 68, 46,,, +6, 5, 1, 2, 1, 1, 1, 1, 128, 132, 132, 132, 88,,, +6, 5, 1, 2, 1, 1, 1, 1, 256, 132, 132, 132, 88,,, +6, 5, 1, 2, 1, 1, 1, 1, 384, 132, 132, 132, 88,,, +6, 5, 1, 3, 1, 1, 1, 1, 128, 196, 196, 196, 130,,, +6, 5, 1, 3, 1, 1, 1, 1, 256, 196, 196, 196, 130,,, +6, 5, 1, 3, 1, 1, 1, 1, 384, 196, 196, 196, 130,,, +6, 5, 1, 4, 1, 1, 1, 1, 128, 240, 240, 240, 172,,, +6, 5, 1, 4, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +6, 5, 1, 4, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +6, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 240, 214,,, +6, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 240, 214,,, +6, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 240, 214,,, +6, 5, 2, 1, 1, 1, 1, 1, 128, 68, 68, 68, 46,,, +6, 5, 2, 1, 1, 1, 1, 1, 256, 68, 68, 68, 46,,, +6, 5, 2, 1, 1, 1, 1, 1, 384, 68, 68, 68, 46,,, +6, 5, 2, 2, 1, 1, 1, 1, 128, 132, 132, 132, 88,,, +6, 5, 2, 2, 1, 1, 1, 1, 256, 132, 132, 132, 88,,, +6, 5, 2, 2, 1, 1, 1, 1, 384, 132, 132, 132, 88,,, +6, 5, 2, 3, 1, 1, 1, 1, 128, 196, 196, 196, 130,,, +6, 5, 2, 3, 1, 1, 1, 1, 256, 196, 196, 196, 130,,, +6, 5, 2, 3, 1, 1, 1, 1, 384, 196, 196, 196, 130,,, +6, 5, 2, 4, 1, 1, 1, 1, 128, 240, 240, 240, 172,,, +6, 5, 2, 4, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +6, 5, 2, 4, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +6, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 240, 214,,, +6, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 240, 214,,, +6, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 240, 214,,, +6, 5, 3, 1, 1, 1, 1, 1, 128, 68, 68, 68, 46,,, +6, 5, 3, 1, 1, 1, 1, 1, 256, 68, 68, 68, 46,,, +6, 5, 3, 1, 1, 1, 1, 1, 384, 68, 68, 68, 46,,, +6, 5, 3, 2, 1, 1, 1, 1, 128, 132, 132, 132, 88,,, +6, 5, 3, 2, 1, 1, 1, 1, 256, 132, 132, 132, 88,,, +6, 5, 3, 2, 1, 1, 1, 1, 384, 132, 132, 132, 88,,, +6, 5, 3, 3, 1, 1, 1, 1, 128, 196, 196, 196, 130,,, +6, 5, 3, 3, 1, 1, 1, 1, 256, 196, 196, 196, 130,,, +6, 5, 3, 3, 1, 1, 1, 1, 384, 196, 196, 196, 130,,, +6, 5, 3, 4, 1, 1, 1, 1, 128, 240, 
240, 240, 172,,, +6, 5, 3, 4, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +6, 5, 3, 4, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +6, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 240, 214,,, +6, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 240, 214,,, +6, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 240, 214,,, +6, 5, 4, 1, 1, 1, 1, 1, 128, 68, 68, 68, 46,,, +6, 5, 4, 1, 1, 1, 1, 1, 256, 68, 68, 68, 46,,, +6, 5, 4, 1, 1, 1, 1, 1, 384, 68, 68, 68, 46,,, +6, 5, 4, 2, 1, 1, 1, 1, 128, 132, 132, 132, 88,,, +6, 5, 4, 2, 1, 1, 1, 1, 256, 132, 132, 132, 88,,, +6, 5, 4, 2, 1, 1, 1, 1, 384, 132, 132, 132, 88,,, +6, 5, 4, 3, 1, 1, 1, 1, 128, 196, 196, 196, 130,,, +6, 5, 4, 3, 1, 1, 1, 1, 256, 196, 196, 196, 130,,, +6, 5, 4, 3, 1, 1, 1, 1, 384, 196, 196, 196, 130,,, +6, 5, 4, 4, 1, 1, 1, 1, 128, 240, 240, 240, 172,,, +6, 5, 4, 4, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +6, 5, 4, 4, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +6, 5, 4, 5, 1, 1, 1, 1, 128, 240, 240, 240, 214,,, +6, 5, 4, 5, 1, 1, 1, 1, 256, 240, 240, 240, 214,,, +6, 5, 4, 5, 1, 1, 1, 1, 384, 240, 240, 240, 214,,, +6, 5, 5, 1, 1, 1, 1, 1, 128, 68, 68, 68, 46,,, +6, 5, 5, 1, 1, 1, 1, 1, 256, 68, 68, 68, 46,,, +6, 5, 5, 1, 1, 1, 1, 1, 384, 68, 68, 68, 46,,, +6, 5, 5, 2, 1, 1, 1, 1, 128, 132, 132, 132, 88,,, +6, 5, 5, 2, 1, 1, 1, 1, 256, 132, 132, 132, 88,,, +6, 5, 5, 2, 1, 1, 1, 1, 384, 132, 132, 132, 88,,, +6, 5, 5, 3, 1, 1, 1, 1, 128, 196, 196, 196, 130,,, +6, 5, 5, 3, 1, 1, 1, 1, 256, 196, 196, 196, 130,,, +6, 5, 5, 3, 1, 1, 1, 1, 384, 196, 196, 196, 130,,, +6, 5, 5, 4, 1, 1, 1, 1, 128, 240, 240, 240, 172,,, +6, 5, 5, 4, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +6, 5, 5, 4, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +6, 5, 5, 5, 1, 1, 1, 1, 128, 240, 240, 240, 214,,, +6, 5, 5, 5, 1, 1, 1, 1, 256, 240, 240, 240, 214,,, +6, 5, 5, 5, 1, 1, 1, 1, 384, 240, 240, 240, 214,,, +6, 5, 6, 1, 1, 1, 1, 1, 128, 68, 68, 68, 46,,, +6, 5, 6, 1, 1, 1, 1, 1, 256, 68, 68, 68, 46,,, +6, 5, 6, 1, 1, 1, 1, 1, 384, 68, 68, 68, 46,,, +6, 5, 6, 2, 1, 1, 1, 1, 128, 132, 132, 132, 
88,,, +6, 5, 6, 2, 1, 1, 1, 1, 256, 132, 132, 132, 88,,, +6, 5, 6, 2, 1, 1, 1, 1, 384, 132, 132, 132, 88,,, +6, 5, 6, 3, 1, 1, 1, 1, 128, 196, 196, 196, 130,,, +6, 5, 6, 3, 1, 1, 1, 1, 256, 196, 196, 196, 130,,, +6, 5, 6, 3, 1, 1, 1, 1, 384, 196, 196, 196, 130,,, +6, 5, 6, 4, 1, 1, 1, 1, 128, 240, 240, 240, 172,,, +6, 5, 6, 4, 1, 1, 1, 1, 256, 240, 240, 240, 172,,, +6, 5, 6, 4, 1, 1, 1, 1, 384, 240, 240, 240, 172,,, +6, 5, 6, 5, 1, 1, 1, 1, 128, 240, 240, 240, 214,,, +6, 5, 6, 5, 1, 1, 1, 1, 256, 240, 240, 240, 214,,, +6, 5, 6, 5, 1, 1, 1, 1, 384, 240, 240, 240, 214,,, +6, 6, 1, 1, 1, 1, 1, 1, 128, 69, 69, 47, 26,,, +6, 6, 1, 1, 1, 1, 1, 1, 256, 69, 69, 47, 26,,, +6, 6, 1, 1, 1, 1, 1, 1, 384, 69, 69, 47, 26,,, +6, 6, 1, 2, 1, 1, 1, 1, 128, 133, 133, 89, 47,,, +6, 6, 1, 2, 1, 1, 1, 1, 256, 133, 133, 89, 47,,, +6, 6, 1, 2, 1, 1, 1, 1, 384, 133, 133, 89, 47,,, +6, 6, 1, 3, 1, 1, 1, 1, 128, 197, 197, 131, 68,,, +6, 6, 1, 3, 1, 1, 1, 1, 256, 197, 197, 131, 68,,, +6, 6, 1, 3, 1, 1, 1, 1, 384, 197, 197, 131, 68,,, +6, 6, 1, 4, 1, 1, 1, 1, 128, 240, 240, 173, 89,,, +6, 6, 1, 4, 1, 1, 1, 1, 256, 240, 240, 173, 89,,, +6, 6, 1, 4, 1, 1, 1, 1, 384, 240, 240, 173, 89,,, +6, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 215, 110,,, +6, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 215, 110,,, +6, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 215, 110,,, +6, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +6, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +6, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +6, 6, 2, 1, 1, 1, 1, 1, 128, 69, 69, 47, 26,,, +6, 6, 2, 1, 1, 1, 1, 1, 256, 69, 69, 47, 26,,, +6, 6, 2, 1, 1, 1, 1, 1, 384, 69, 69, 47, 26,,, +6, 6, 2, 2, 1, 1, 1, 1, 128, 133, 133, 89, 47,,, +6, 6, 2, 2, 1, 1, 1, 1, 256, 133, 133, 89, 47,,, +6, 6, 2, 2, 1, 1, 1, 1, 384, 133, 133, 89, 47,,, +6, 6, 2, 3, 1, 1, 1, 1, 128, 197, 197, 131, 68,,, +6, 6, 2, 3, 1, 1, 1, 1, 256, 197, 197, 131, 68,,, +6, 6, 2, 3, 1, 1, 1, 1, 384, 197, 197, 131, 68,,, +6, 6, 2, 4, 1, 1, 1, 1, 128, 240, 240, 173, 89,,, +6, 6, 2, 4, 
1, 1, 1, 1, 256, 240, 240, 173, 89,,, +6, 6, 2, 4, 1, 1, 1, 1, 384, 240, 240, 173, 89,,, +6, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 215, 110,,, +6, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 215, 110,,, +6, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 215, 110,,, +6, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +6, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +6, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +6, 6, 3, 1, 1, 1, 1, 1, 128, 69, 69, 47, 26,,, +6, 6, 3, 1, 1, 1, 1, 1, 256, 69, 69, 47, 26,,, +6, 6, 3, 1, 1, 1, 1, 1, 384, 69, 69, 47, 26,,, +6, 6, 3, 2, 1, 1, 1, 1, 128, 133, 133, 89, 47,,, +6, 6, 3, 2, 1, 1, 1, 1, 256, 133, 133, 89, 47,,, +6, 6, 3, 2, 1, 1, 1, 1, 384, 133, 133, 89, 47,,, +6, 6, 3, 3, 1, 1, 1, 1, 128, 197, 197, 131, 68,,, +6, 6, 3, 3, 1, 1, 1, 1, 256, 197, 197, 131, 68,,, +6, 6, 3, 3, 1, 1, 1, 1, 384, 197, 197, 131, 68,,, +6, 6, 3, 4, 1, 1, 1, 1, 128, 240, 240, 173, 89,,, +6, 6, 3, 4, 1, 1, 1, 1, 256, 240, 240, 173, 89,,, +6, 6, 3, 4, 1, 1, 1, 1, 384, 240, 240, 173, 89,,, +6, 6, 3, 5, 1, 1, 1, 1, 128, 240, 240, 215, 110,,, +6, 6, 3, 5, 1, 1, 1, 1, 256, 240, 240, 215, 110,,, +6, 6, 3, 5, 1, 1, 1, 1, 384, 240, 240, 215, 110,,, +6, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +6, 6, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +6, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +6, 6, 4, 1, 1, 1, 1, 1, 128, 69, 69, 47, 26,,, +6, 6, 4, 1, 1, 1, 1, 1, 256, 69, 69, 47, 26,,, +6, 6, 4, 1, 1, 1, 1, 1, 384, 69, 69, 47, 26,,, +6, 6, 4, 2, 1, 1, 1, 1, 128, 133, 133, 89, 47,,, +6, 6, 4, 2, 1, 1, 1, 1, 256, 133, 133, 89, 47,,, +6, 6, 4, 2, 1, 1, 1, 1, 384, 133, 133, 89, 47,,, +6, 6, 4, 3, 1, 1, 1, 1, 128, 197, 197, 131, 68,,, +6, 6, 4, 3, 1, 1, 1, 1, 256, 197, 197, 131, 68,,, +6, 6, 4, 3, 1, 1, 1, 1, 384, 197, 197, 131, 68,,, +6, 6, 4, 4, 1, 1, 1, 1, 128, 240, 240, 173, 89,,, +6, 6, 4, 4, 1, 1, 1, 1, 256, 240, 240, 173, 89,,, +6, 6, 4, 4, 1, 1, 1, 1, 384, 240, 240, 173, 89,,, +6, 6, 4, 5, 1, 1, 1, 1, 128, 240, 240, 215, 110,,, +6, 6, 4, 5, 1, 1, 1, 1, 256, 240, 
240, 215, 110,,, +6, 6, 4, 5, 1, 1, 1, 1, 384, 240, 240, 215, 110,,, +6, 6, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +6, 6, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +6, 6, 4, 6, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +6, 6, 5, 1, 1, 1, 1, 1, 128, 69, 69, 47, 26,,, +6, 6, 5, 1, 1, 1, 1, 1, 256, 69, 69, 47, 26,,, +6, 6, 5, 1, 1, 1, 1, 1, 384, 69, 69, 47, 26,,, +6, 6, 5, 2, 1, 1, 1, 1, 128, 133, 133, 89, 47,,, +6, 6, 5, 2, 1, 1, 1, 1, 256, 133, 133, 89, 47,,, +6, 6, 5, 2, 1, 1, 1, 1, 384, 133, 133, 89, 47,,, +6, 6, 5, 3, 1, 1, 1, 1, 128, 197, 197, 131, 68,,, +6, 6, 5, 3, 1, 1, 1, 1, 256, 197, 197, 131, 68,,, +6, 6, 5, 3, 1, 1, 1, 1, 384, 197, 197, 131, 68,,, +6, 6, 5, 4, 1, 1, 1, 1, 128, 240, 240, 173, 89,,, +6, 6, 5, 4, 1, 1, 1, 1, 256, 240, 240, 173, 89,,, +6, 6, 5, 4, 1, 1, 1, 1, 384, 240, 240, 173, 89,,, +6, 6, 5, 5, 1, 1, 1, 1, 128, 240, 240, 215, 110,,, +6, 6, 5, 5, 1, 1, 1, 1, 256, 240, 240, 215, 110,,, +6, 6, 5, 5, 1, 1, 1, 1, 384, 240, 240, 215, 110,,, +6, 6, 5, 6, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +6, 6, 5, 6, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, +6, 6, 5, 6, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +6, 6, 6, 1, 1, 1, 1, 1, 128, 69, 69, 47, 26,,, +6, 6, 6, 1, 1, 1, 1, 1, 256, 69, 69, 47, 26,,, +6, 6, 6, 1, 1, 1, 1, 1, 384, 69, 69, 47, 26,,, +6, 6, 6, 2, 1, 1, 1, 1, 128, 133, 133, 89, 47,,, +6, 6, 6, 2, 1, 1, 1, 1, 256, 133, 133, 89, 47,,, +6, 6, 6, 2, 1, 1, 1, 1, 384, 133, 133, 89, 47,,, +6, 6, 6, 3, 1, 1, 1, 1, 128, 197, 197, 131, 68,,, +6, 6, 6, 3, 1, 1, 1, 1, 256, 197, 197, 131, 68,,, +6, 6, 6, 3, 1, 1, 1, 1, 384, 197, 197, 131, 68,,, +6, 6, 6, 4, 1, 1, 1, 1, 128, 240, 240, 173, 89,,, +6, 6, 6, 4, 1, 1, 1, 1, 256, 240, 240, 173, 89,,, +6, 6, 6, 4, 1, 1, 1, 1, 384, 240, 240, 173, 89,,, +6, 6, 6, 5, 1, 1, 1, 1, 128, 240, 240, 215, 110,,, +6, 6, 6, 5, 1, 1, 1, 1, 256, 240, 240, 215, 110,,, +6, 6, 6, 5, 1, 1, 1, 1, 384, 240, 240, 215, 110,,, +6, 6, 6, 6, 1, 1, 1, 1, 128, 240, 240, 240, 131,,, +6, 6, 6, 6, 1, 1, 1, 1, 256, 240, 240, 240, 131,,, 
+6, 6, 6, 6, 1, 1, 1, 1, 384, 240, 240, 240, 131,,, +6, 7, 1, 1, 1, 1, 1, 1, 128, 70, 70, 48, 27,,, +6, 7, 1, 1, 1, 1, 1, 1, 256, 70, 70, 48, 27,,, +6, 7, 1, 1, 1, 1, 1, 1, 384, 70, 70, 48, 27,,, +6, 7, 1, 2, 1, 1, 1, 1, 128, 134, 134, 90, 48,,, +6, 7, 1, 2, 1, 1, 1, 1, 256, 134, 134, 90, 48,,, +6, 7, 1, 2, 1, 1, 1, 1, 384, 134, 134, 90, 48,,, +6, 7, 1, 3, 1, 1, 1, 1, 128, 198, 198, 132, 69,,, +6, 7, 1, 3, 1, 1, 1, 1, 256, 198, 198, 132, 69,,, +6, 7, 1, 3, 1, 1, 1, 1, 384, 198, 198, 132, 69,,, +6, 7, 1, 4, 1, 1, 1, 1, 128, 240, 240, 174, 90,,, +6, 7, 1, 4, 1, 1, 1, 1, 256, 240, 240, 174, 90,,, +6, 7, 1, 4, 1, 1, 1, 1, 384, 240, 240, 174, 90,,, +6, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 216, 111,,, +6, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 216, 111,,, +6, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 216, 111,,, +6, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +6, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +6, 7, 1, 6, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +6, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240, 153,,, +6, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240, 153,,, +6, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240, 153,,, +6, 7, 2, 1, 1, 1, 1, 1, 128, 70, 70, 48, 27,,, +6, 7, 2, 1, 1, 1, 1, 1, 256, 70, 70, 48, 27,,, +6, 7, 2, 1, 1, 1, 1, 1, 384, 70, 70, 48, 27,,, +6, 7, 2, 2, 1, 1, 1, 1, 128, 134, 134, 90, 48,,, +6, 7, 2, 2, 1, 1, 1, 1, 256, 134, 134, 90, 48,,, +6, 7, 2, 2, 1, 1, 1, 1, 384, 134, 134, 90, 48,,, +6, 7, 2, 3, 1, 1, 1, 1, 128, 198, 198, 132, 69,,, +6, 7, 2, 3, 1, 1, 1, 1, 256, 198, 198, 132, 69,,, +6, 7, 2, 3, 1, 1, 1, 1, 384, 198, 198, 132, 69,,, +6, 7, 2, 4, 1, 1, 1, 1, 128, 240, 240, 174, 90,,, +6, 7, 2, 4, 1, 1, 1, 1, 256, 240, 240, 174, 90,,, +6, 7, 2, 4, 1, 1, 1, 1, 384, 240, 240, 174, 90,,, +6, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 216, 111,,, +6, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 216, 111,,, +6, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 216, 111,,, +6, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +6, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +6, 7, 2, 6, 1, 1, 
1, 1, 384, 240, 240, 240, 132,,, +6, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 240, 153,,, +6, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240, 153,,, +6, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240, 153,,, +6, 7, 3, 1, 1, 1, 1, 1, 128, 70, 70, 48, 27,,, +6, 7, 3, 1, 1, 1, 1, 1, 256, 70, 70, 48, 27,,, +6, 7, 3, 1, 1, 1, 1, 1, 384, 70, 70, 48, 27,,, +6, 7, 3, 2, 1, 1, 1, 1, 128, 134, 134, 90, 48,,, +6, 7, 3, 2, 1, 1, 1, 1, 256, 134, 134, 90, 48,,, +6, 7, 3, 2, 1, 1, 1, 1, 384, 134, 134, 90, 48,,, +6, 7, 3, 3, 1, 1, 1, 1, 128, 198, 198, 132, 69,,, +6, 7, 3, 3, 1, 1, 1, 1, 256, 198, 198, 132, 69,,, +6, 7, 3, 3, 1, 1, 1, 1, 384, 198, 198, 132, 69,,, +6, 7, 3, 4, 1, 1, 1, 1, 128, 240, 240, 174, 90,,, +6, 7, 3, 4, 1, 1, 1, 1, 256, 240, 240, 174, 90,,, +6, 7, 3, 4, 1, 1, 1, 1, 384, 240, 240, 174, 90,,, +6, 7, 3, 5, 1, 1, 1, 1, 128, 240, 240, 216, 111,,, +6, 7, 3, 5, 1, 1, 1, 1, 256, 240, 240, 216, 111,,, +6, 7, 3, 5, 1, 1, 1, 1, 384, 240, 240, 216, 111,,, +6, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +6, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +6, 7, 3, 6, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +6, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 240, 153,,, +6, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 240, 153,,, +6, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240, 240, 153,,, +6, 7, 4, 1, 1, 1, 1, 1, 128, 70, 70, 48, 27,,, +6, 7, 4, 1, 1, 1, 1, 1, 256, 70, 70, 48, 27,,, +6, 7, 4, 1, 1, 1, 1, 1, 384, 70, 70, 48, 27,,, +6, 7, 4, 2, 1, 1, 1, 1, 128, 134, 134, 90, 48,,, +6, 7, 4, 2, 1, 1, 1, 1, 256, 134, 134, 90, 48,,, +6, 7, 4, 2, 1, 1, 1, 1, 384, 134, 134, 90, 48,,, +6, 7, 4, 3, 1, 1, 1, 1, 128, 198, 198, 132, 69,,, +6, 7, 4, 3, 1, 1, 1, 1, 256, 198, 198, 132, 69,,, +6, 7, 4, 3, 1, 1, 1, 1, 384, 198, 198, 132, 69,,, +6, 7, 4, 4, 1, 1, 1, 1, 128, 240, 240, 174, 90,,, +6, 7, 4, 4, 1, 1, 1, 1, 256, 240, 240, 174, 90,,, +6, 7, 4, 4, 1, 1, 1, 1, 384, 240, 240, 174, 90,,, +6, 7, 4, 5, 1, 1, 1, 1, 128, 240, 240, 216, 111,,, +6, 7, 4, 5, 1, 1, 1, 1, 256, 240, 240, 216, 111,,, +6, 7, 4, 5, 1, 1, 1, 1, 384, 240, 
240, 216, 111,,, +6, 7, 4, 6, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +6, 7, 4, 6, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +6, 7, 4, 6, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +6, 7, 4, 7, 1, 1, 1, 1, 128, 240, 240, 240, 153,,, +6, 7, 4, 7, 1, 1, 1, 1, 256, 240, 240, 240, 153,,, +6, 7, 4, 7, 1, 1, 1, 1, 384, 240, 240, 240, 153,,, +6, 7, 5, 1, 1, 1, 1, 1, 128, 70, 70, 48, 27,,, +6, 7, 5, 1, 1, 1, 1, 1, 256, 70, 70, 48, 27,,, +6, 7, 5, 1, 1, 1, 1, 1, 384, 70, 70, 48, 27,,, +6, 7, 5, 2, 1, 1, 1, 1, 128, 134, 134, 90, 48,,, +6, 7, 5, 2, 1, 1, 1, 1, 256, 134, 134, 90, 48,,, +6, 7, 5, 2, 1, 1, 1, 1, 384, 134, 134, 90, 48,,, +6, 7, 5, 3, 1, 1, 1, 1, 128, 198, 198, 132, 69,,, +6, 7, 5, 3, 1, 1, 1, 1, 256, 198, 198, 132, 69,,, +6, 7, 5, 3, 1, 1, 1, 1, 384, 198, 198, 132, 69,,, +6, 7, 5, 4, 1, 1, 1, 1, 128, 240, 240, 174, 90,,, +6, 7, 5, 4, 1, 1, 1, 1, 256, 240, 240, 174, 90,,, +6, 7, 5, 4, 1, 1, 1, 1, 384, 240, 240, 174, 90,,, +6, 7, 5, 5, 1, 1, 1, 1, 128, 240, 240, 216, 111,,, +6, 7, 5, 5, 1, 1, 1, 1, 256, 240, 240, 216, 111,,, +6, 7, 5, 5, 1, 1, 1, 1, 384, 240, 240, 216, 111,,, +6, 7, 5, 6, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +6, 7, 5, 6, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +6, 7, 5, 6, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +6, 7, 5, 7, 1, 1, 1, 1, 128, 240, 240, 240, 153,,, +6, 7, 5, 7, 1, 1, 1, 1, 256, 240, 240, 240, 153,,, +6, 7, 5, 7, 1, 1, 1, 1, 384, 240, 240, 240, 153,,, +6, 7, 6, 1, 1, 1, 1, 1, 128, 70, 70, 48, 27,,, +6, 7, 6, 1, 1, 1, 1, 1, 256, 70, 70, 48, 27,,, +6, 7, 6, 1, 1, 1, 1, 1, 384, 70, 70, 48, 27,,, +6, 7, 6, 2, 1, 1, 1, 1, 128, 134, 134, 90, 48,,, +6, 7, 6, 2, 1, 1, 1, 1, 256, 134, 134, 90, 48,,, +6, 7, 6, 2, 1, 1, 1, 1, 384, 134, 134, 90, 48,,, +6, 7, 6, 3, 1, 1, 1, 1, 128, 198, 198, 132, 69,,, +6, 7, 6, 3, 1, 1, 1, 1, 256, 198, 198, 132, 69,,, +6, 7, 6, 3, 1, 1, 1, 1, 384, 198, 198, 132, 69,,, +6, 7, 6, 4, 1, 1, 1, 1, 128, 240, 240, 174, 90,,, +6, 7, 6, 4, 1, 1, 1, 1, 256, 240, 240, 174, 90,,, +6, 7, 6, 4, 1, 1, 1, 1, 384, 240, 240, 174, 90,,, 
+6, 7, 6, 5, 1, 1, 1, 1, 128, 240, 240, 216, 111,,, +6, 7, 6, 5, 1, 1, 1, 1, 256, 240, 240, 216, 111,,, +6, 7, 6, 5, 1, 1, 1, 1, 384, 240, 240, 216, 111,,, +6, 7, 6, 6, 1, 1, 1, 1, 128, 240, 240, 240, 132,,, +6, 7, 6, 6, 1, 1, 1, 1, 256, 240, 240, 240, 132,,, +6, 7, 6, 6, 1, 1, 1, 1, 384, 240, 240, 240, 132,,, +6, 7, 6, 7, 1, 1, 1, 1, 128, 240, 240, 240, 153,,, +6, 7, 6, 7, 1, 1, 1, 1, 256, 240, 240, 240, 153,,, +6, 7, 6, 7, 1, 1, 1, 1, 384, 240, 240, 240, 153,,, +7, 1, 1, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 54, 36, 18 +7, 1, 1, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 54, 36, 18 +7, 1, 1, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 54, 36, 18 +7, 1, 2, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 54, 36, 18 +7, 1, 2, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 54, 36, 18 +7, 1, 2, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 54, 36, 18 +7, 1, 3, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 54, 36, 18 +7, 1, 3, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 54, 36, 18 +7, 1, 3, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 54, 36, 18 +7, 1, 4, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 54, 36, 18 +7, 1, 4, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 54, 36, 18 +7, 1, 4, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 54, 36, 18 +7, 1, 5, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 54, 36, 18 +7, 1, 5, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 54, 36, 18 +7, 1, 5, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 54, 36, 18 +7, 1, 6, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 54, 36, 18 +7, 1, 6, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 54, 36, 18 +7, 1, 6, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 54, 36, 18 +7, 1, 7, 1, 1, 1, 1, 1, 128, 54, 54, 54, 54, 54, 36, 18 +7, 1, 7, 1, 1, 1, 1, 1, 256, 54, 54, 54, 54, 54, 36, 18 +7, 1, 7, 1, 1, 1, 1, 1, 384, 54, 54, 54, 54, 54, 36, 18 +7, 2, 1, 1, 1, 1, 1, 1, 128, 55, 55, 55, 55, 37,, +7, 2, 1, 1, 1, 1, 1, 1, 256, 55, 55, 55, 55, 37,, +7, 2, 1, 1, 1, 1, 1, 1, 384, 55, 55, 55, 55, 37,, +7, 2, 1, 2, 1, 1, 1, 1, 128, 109, 109, 109, 109, 73,, +7, 2, 1, 2, 1, 1, 1, 1, 256, 109, 109, 109, 109, 73,, +7, 2, 1, 2, 1, 1, 1, 1, 384, 109, 109, 109, 109, 73,, +7, 2, 2, 1, 1, 
1, 1, 1, 128, 55, 55, 55, 55, 37,, +7, 2, 2, 1, 1, 1, 1, 1, 256, 55, 55, 55, 55, 37,, +7, 2, 2, 1, 1, 1, 1, 1, 384, 55, 55, 55, 55, 37,, +7, 2, 2, 2, 1, 1, 1, 1, 128, 109, 109, 109, 109, 73,, +7, 2, 2, 2, 1, 1, 1, 1, 256, 109, 109, 109, 109, 73,, +7, 2, 2, 2, 1, 1, 1, 1, 384, 109, 109, 109, 109, 73,, +7, 2, 3, 1, 1, 1, 1, 1, 128, 55, 55, 55, 55, 37,, +7, 2, 3, 1, 1, 1, 1, 1, 256, 55, 55, 55, 55, 37,, +7, 2, 3, 1, 1, 1, 1, 1, 384, 55, 55, 55, 55, 37,, +7, 2, 3, 2, 1, 1, 1, 1, 128, 109, 109, 109, 109, 73,, +7, 2, 3, 2, 1, 1, 1, 1, 256, 109, 109, 109, 109, 73,, +7, 2, 3, 2, 1, 1, 1, 1, 384, 109, 109, 109, 109, 73,, +7, 2, 4, 1, 1, 1, 1, 1, 128, 55, 55, 55, 55, 37,, +7, 2, 4, 1, 1, 1, 1, 1, 256, 55, 55, 55, 55, 37,, +7, 2, 4, 1, 1, 1, 1, 1, 384, 55, 55, 55, 55, 37,, +7, 2, 4, 2, 1, 1, 1, 1, 128, 109, 109, 109, 109, 73,, +7, 2, 4, 2, 1, 1, 1, 1, 256, 109, 109, 109, 109, 73,, +7, 2, 4, 2, 1, 1, 1, 1, 384, 109, 109, 109, 109, 73,, +7, 2, 5, 1, 1, 1, 1, 1, 128, 55, 55, 55, 55, 37,, +7, 2, 5, 1, 1, 1, 1, 1, 256, 55, 55, 55, 55, 37,, +7, 2, 5, 1, 1, 1, 1, 1, 384, 55, 55, 55, 55, 37,, +7, 2, 5, 2, 1, 1, 1, 1, 128, 109, 109, 109, 109, 73,, +7, 2, 5, 2, 1, 1, 1, 1, 256, 109, 109, 109, 109, 73,, +7, 2, 5, 2, 1, 1, 1, 1, 384, 109, 109, 109, 109, 73,, +7, 2, 6, 1, 1, 1, 1, 1, 128, 55, 55, 55, 55, 37,, +7, 2, 6, 1, 1, 1, 1, 1, 256, 55, 55, 55, 55, 37,, +7, 2, 6, 1, 1, 1, 1, 1, 384, 55, 55, 55, 55, 37,, +7, 2, 6, 2, 1, 1, 1, 1, 128, 109, 109, 109, 109, 73,, +7, 2, 6, 2, 1, 1, 1, 1, 256, 109, 109, 109, 109, 73,, +7, 2, 6, 2, 1, 1, 1, 1, 384, 109, 109, 109, 109, 73,, +7, 2, 7, 1, 1, 1, 1, 1, 128, 55, 55, 55, 55, 37,, +7, 2, 7, 1, 1, 1, 1, 1, 256, 55, 55, 55, 55, 37,, +7, 2, 7, 1, 1, 1, 1, 1, 384, 55, 55, 55, 55, 37,, +7, 2, 7, 2, 1, 1, 1, 1, 128, 109, 109, 109, 109, 73,, +7, 2, 7, 2, 1, 1, 1, 1, 256, 109, 109, 109, 109, 73,, +7, 2, 7, 2, 1, 1, 1, 1, 384, 109, 109, 109, 109, 73,, +7, 3, 1, 1, 1, 1, 1, 1, 128, 56, 56, 56, 38, 20,, +7, 3, 1, 1, 1, 1, 1, 1, 256, 56, 56, 56, 38, 20,, +7, 
3, 1, 1, 1, 1, 1, 1, 384, 56, 56, 56, 38, 20,, +7, 3, 1, 2, 1, 1, 1, 1, 128, 110, 110, 110, 74, 38,, +7, 3, 1, 2, 1, 1, 1, 1, 256, 110, 110, 110, 74, 38,, +7, 3, 1, 2, 1, 1, 1, 1, 384, 110, 110, 110, 74, 38,, +7, 3, 1, 3, 1, 1, 1, 1, 128, 164, 164, 164, 110, 56,, +7, 3, 1, 3, 1, 1, 1, 1, 256, 164, 164, 164, 110, 56,, +7, 3, 1, 3, 1, 1, 1, 1, 384, 164, 164, 164, 110, 56,, +7, 3, 2, 1, 1, 1, 1, 1, 128, 56, 56, 56, 38, 20,, +7, 3, 2, 1, 1, 1, 1, 1, 256, 56, 56, 56, 38, 20,, +7, 3, 2, 1, 1, 1, 1, 1, 384, 56, 56, 56, 38, 20,, +7, 3, 2, 2, 1, 1, 1, 1, 128, 110, 110, 110, 74, 38,, +7, 3, 2, 2, 1, 1, 1, 1, 256, 110, 110, 110, 74, 38,, +7, 3, 2, 2, 1, 1, 1, 1, 384, 110, 110, 110, 74, 38,, +7, 3, 2, 3, 1, 1, 1, 1, 128, 164, 164, 164, 110, 56,, +7, 3, 2, 3, 1, 1, 1, 1, 256, 164, 164, 164, 110, 56,, +7, 3, 2, 3, 1, 1, 1, 1, 384, 164, 164, 164, 110, 56,, +7, 3, 3, 1, 1, 1, 1, 1, 128, 56, 56, 56, 38, 20,, +7, 3, 3, 1, 1, 1, 1, 1, 256, 56, 56, 56, 38, 20,, +7, 3, 3, 1, 1, 1, 1, 1, 384, 56, 56, 56, 38, 20,, +7, 3, 3, 2, 1, 1, 1, 1, 128, 110, 110, 110, 74, 38,, +7, 3, 3, 2, 1, 1, 1, 1, 256, 110, 110, 110, 74, 38,, +7, 3, 3, 2, 1, 1, 1, 1, 384, 110, 110, 110, 74, 38,, +7, 3, 3, 3, 1, 1, 1, 1, 128, 164, 164, 164, 110, 56,, +7, 3, 3, 3, 1, 1, 1, 1, 256, 164, 164, 164, 110, 56,, +7, 3, 3, 3, 1, 1, 1, 1, 384, 164, 164, 164, 110, 56,, +7, 3, 4, 1, 1, 1, 1, 1, 128, 56, 56, 56, 38, 20,, +7, 3, 4, 1, 1, 1, 1, 1, 256, 56, 56, 56, 38, 20,, +7, 3, 4, 1, 1, 1, 1, 1, 384, 56, 56, 56, 38, 20,, +7, 3, 4, 2, 1, 1, 1, 1, 128, 110, 110, 110, 74, 38,, +7, 3, 4, 2, 1, 1, 1, 1, 256, 110, 110, 110, 74, 38,, +7, 3, 4, 2, 1, 1, 1, 1, 384, 110, 110, 110, 74, 38,, +7, 3, 4, 3, 1, 1, 1, 1, 128, 164, 164, 164, 110, 56,, +7, 3, 4, 3, 1, 1, 1, 1, 256, 164, 164, 164, 110, 56,, +7, 3, 4, 3, 1, 1, 1, 1, 384, 164, 164, 164, 110, 56,, +7, 3, 5, 1, 1, 1, 1, 1, 128, 56, 56, 56, 38, 20,, +7, 3, 5, 1, 1, 1, 1, 1, 256, 56, 56, 56, 38, 20,, +7, 3, 5, 1, 1, 1, 1, 1, 384, 56, 56, 56, 38, 20,, +7, 3, 5, 2, 1, 1, 1, 1, 128, 
110, 110, 110, 74, 38,, +7, 3, 5, 2, 1, 1, 1, 1, 256, 110, 110, 110, 74, 38,, +7, 3, 5, 2, 1, 1, 1, 1, 384, 110, 110, 110, 74, 38,, +7, 3, 5, 3, 1, 1, 1, 1, 128, 164, 164, 164, 110, 56,, +7, 3, 5, 3, 1, 1, 1, 1, 256, 164, 164, 164, 110, 56,, +7, 3, 5, 3, 1, 1, 1, 1, 384, 164, 164, 164, 110, 56,, +7, 3, 6, 1, 1, 1, 1, 1, 128, 56, 56, 56, 38, 20,, +7, 3, 6, 1, 1, 1, 1, 1, 256, 56, 56, 56, 38, 20,, +7, 3, 6, 1, 1, 1, 1, 1, 384, 56, 56, 56, 38, 20,, +7, 3, 6, 2, 1, 1, 1, 1, 128, 110, 110, 110, 74, 38,, +7, 3, 6, 2, 1, 1, 1, 1, 256, 110, 110, 110, 74, 38,, +7, 3, 6, 2, 1, 1, 1, 1, 384, 110, 110, 110, 74, 38,, +7, 3, 6, 3, 1, 1, 1, 1, 128, 164, 164, 164, 110, 56,, +7, 3, 6, 3, 1, 1, 1, 1, 256, 164, 164, 164, 110, 56,, +7, 3, 6, 3, 1, 1, 1, 1, 384, 164, 164, 164, 110, 56,, +7, 3, 7, 1, 1, 1, 1, 1, 128, 56, 56, 56, 38, 20,, +7, 3, 7, 1, 1, 1, 1, 1, 256, 56, 56, 56, 38, 20,, +7, 3, 7, 1, 1, 1, 1, 1, 384, 56, 56, 56, 38, 20,, +7, 3, 7, 2, 1, 1, 1, 1, 128, 110, 110, 110, 74, 38,, +7, 3, 7, 2, 1, 1, 1, 1, 256, 110, 110, 110, 74, 38,, +7, 3, 7, 2, 1, 1, 1, 1, 384, 110, 110, 110, 74, 38,, +7, 3, 7, 3, 1, 1, 1, 1, 128, 164, 164, 164, 110, 56,, +7, 3, 7, 3, 1, 1, 1, 1, 256, 164, 164, 164, 110, 56,, +7, 3, 7, 3, 1, 1, 1, 1, 384, 164, 164, 164, 110, 56,, +7, 4, 1, 1, 1, 1, 1, 1, 128, 57, 57, 57, 39,,, +7, 4, 1, 1, 1, 1, 1, 1, 256, 57, 57, 57, 39,,, +7, 4, 1, 1, 1, 1, 1, 1, 384, 57, 57, 57, 39,,, +7, 4, 1, 2, 1, 1, 1, 1, 128, 111, 111, 111, 75,,, +7, 4, 1, 2, 1, 1, 1, 1, 256, 111, 111, 111, 75,,, +7, 4, 1, 2, 1, 1, 1, 1, 384, 111, 111, 111, 75,,, +7, 4, 1, 3, 1, 1, 1, 1, 128, 165, 165, 165, 111,,, +7, 4, 1, 3, 1, 1, 1, 1, 256, 165, 165, 165, 111,,, +7, 4, 1, 3, 1, 1, 1, 1, 384, 165, 165, 165, 111,,, +7, 4, 1, 4, 1, 1, 1, 1, 128, 219, 219, 219, 147,,, +7, 4, 1, 4, 1, 1, 1, 1, 256, 219, 219, 219, 147,,, +7, 4, 1, 4, 1, 1, 1, 1, 384, 219, 219, 219, 147,,, +7, 4, 2, 1, 1, 1, 1, 1, 128, 57, 57, 57, 39,,, +7, 4, 2, 1, 1, 1, 1, 1, 256, 57, 57, 57, 39,,, +7, 4, 2, 1, 1, 1, 1, 1, 384, 57, 57, 
57, 39,,, +7, 4, 2, 2, 1, 1, 1, 1, 128, 111, 111, 111, 75,,, +7, 4, 2, 2, 1, 1, 1, 1, 256, 111, 111, 111, 75,,, +7, 4, 2, 2, 1, 1, 1, 1, 384, 111, 111, 111, 75,,, +7, 4, 2, 3, 1, 1, 1, 1, 128, 165, 165, 165, 111,,, +7, 4, 2, 3, 1, 1, 1, 1, 256, 165, 165, 165, 111,,, +7, 4, 2, 3, 1, 1, 1, 1, 384, 165, 165, 165, 111,,, +7, 4, 2, 4, 1, 1, 1, 1, 128, 219, 219, 219, 147,,, +7, 4, 2, 4, 1, 1, 1, 1, 256, 219, 219, 219, 147,,, +7, 4, 2, 4, 1, 1, 1, 1, 384, 219, 219, 219, 147,,, +7, 4, 3, 1, 1, 1, 1, 1, 128, 57, 57, 57, 39,,, +7, 4, 3, 1, 1, 1, 1, 1, 256, 57, 57, 57, 39,,, +7, 4, 3, 1, 1, 1, 1, 1, 384, 57, 57, 57, 39,,, +7, 4, 3, 2, 1, 1, 1, 1, 128, 111, 111, 111, 75,,, +7, 4, 3, 2, 1, 1, 1, 1, 256, 111, 111, 111, 75,,, +7, 4, 3, 2, 1, 1, 1, 1, 384, 111, 111, 111, 75,,, +7, 4, 3, 3, 1, 1, 1, 1, 128, 165, 165, 165, 111,,, +7, 4, 3, 3, 1, 1, 1, 1, 256, 165, 165, 165, 111,,, +7, 4, 3, 3, 1, 1, 1, 1, 384, 165, 165, 165, 111,,, +7, 4, 3, 4, 1, 1, 1, 1, 128, 219, 219, 219, 147,,, +7, 4, 3, 4, 1, 1, 1, 1, 256, 219, 219, 219, 147,,, +7, 4, 3, 4, 1, 1, 1, 1, 384, 219, 219, 219, 147,,, +7, 4, 4, 1, 1, 1, 1, 1, 128, 57, 57, 57, 39,,, +7, 4, 4, 1, 1, 1, 1, 1, 256, 57, 57, 57, 39,,, +7, 4, 4, 1, 1, 1, 1, 1, 384, 57, 57, 57, 39,,, +7, 4, 4, 2, 1, 1, 1, 1, 128, 111, 111, 111, 75,,, +7, 4, 4, 2, 1, 1, 1, 1, 256, 111, 111, 111, 75,,, +7, 4, 4, 2, 1, 1, 1, 1, 384, 111, 111, 111, 75,,, +7, 4, 4, 3, 1, 1, 1, 1, 128, 165, 165, 165, 111,,, +7, 4, 4, 3, 1, 1, 1, 1, 256, 165, 165, 165, 111,,, +7, 4, 4, 3, 1, 1, 1, 1, 384, 165, 165, 165, 111,,, +7, 4, 4, 4, 1, 1, 1, 1, 128, 219, 219, 219, 147,,, +7, 4, 4, 4, 1, 1, 1, 1, 256, 219, 219, 219, 147,,, +7, 4, 4, 4, 1, 1, 1, 1, 384, 219, 219, 219, 147,,, +7, 4, 5, 1, 1, 1, 1, 1, 128, 57, 57, 57, 39,,, +7, 4, 5, 1, 1, 1, 1, 1, 256, 57, 57, 57, 39,,, +7, 4, 5, 1, 1, 1, 1, 1, 384, 57, 57, 57, 39,,, +7, 4, 5, 2, 1, 1, 1, 1, 128, 111, 111, 111, 75,,, +7, 4, 5, 2, 1, 1, 1, 1, 256, 111, 111, 111, 75,,, +7, 4, 5, 2, 1, 1, 1, 1, 384, 111, 111, 111, 75,,, +7, 4, 5, 
3, 1, 1, 1, 1, 128, 165, 165, 165, 111,,, +7, 4, 5, 3, 1, 1, 1, 1, 256, 165, 165, 165, 111,,, +7, 4, 5, 3, 1, 1, 1, 1, 384, 165, 165, 165, 111,,, +7, 4, 5, 4, 1, 1, 1, 1, 128, 219, 219, 219, 147,,, +7, 4, 5, 4, 1, 1, 1, 1, 256, 219, 219, 219, 147,,, +7, 4, 5, 4, 1, 1, 1, 1, 384, 219, 219, 219, 147,,, +7, 4, 6, 1, 1, 1, 1, 1, 128, 57, 57, 57, 39,,, +7, 4, 6, 1, 1, 1, 1, 1, 256, 57, 57, 57, 39,,, +7, 4, 6, 1, 1, 1, 1, 1, 384, 57, 57, 57, 39,,, +7, 4, 6, 2, 1, 1, 1, 1, 128, 111, 111, 111, 75,,, +7, 4, 6, 2, 1, 1, 1, 1, 256, 111, 111, 111, 75,,, +7, 4, 6, 2, 1, 1, 1, 1, 384, 111, 111, 111, 75,,, +7, 4, 6, 3, 1, 1, 1, 1, 128, 165, 165, 165, 111,,, +7, 4, 6, 3, 1, 1, 1, 1, 256, 165, 165, 165, 111,,, +7, 4, 6, 3, 1, 1, 1, 1, 384, 165, 165, 165, 111,,, +7, 4, 6, 4, 1, 1, 1, 1, 128, 219, 219, 219, 147,,, +7, 4, 6, 4, 1, 1, 1, 1, 256, 219, 219, 219, 147,,, +7, 4, 6, 4, 1, 1, 1, 1, 384, 219, 219, 219, 147,,, +7, 4, 7, 1, 1, 1, 1, 1, 128, 57, 57, 57, 39,,, +7, 4, 7, 1, 1, 1, 1, 1, 256, 57, 57, 57, 39,,, +7, 4, 7, 1, 1, 1, 1, 1, 384, 57, 57, 57, 39,,, +7, 4, 7, 2, 1, 1, 1, 1, 128, 111, 111, 111, 75,,, +7, 4, 7, 2, 1, 1, 1, 1, 256, 111, 111, 111, 75,,, +7, 4, 7, 2, 1, 1, 1, 1, 384, 111, 111, 111, 75,,, +7, 4, 7, 3, 1, 1, 1, 1, 128, 165, 165, 165, 111,,, +7, 4, 7, 3, 1, 1, 1, 1, 256, 165, 165, 165, 111,,, +7, 4, 7, 3, 1, 1, 1, 1, 384, 165, 165, 165, 111,,, +7, 4, 7, 4, 1, 1, 1, 1, 128, 219, 219, 219, 147,,, +7, 4, 7, 4, 1, 1, 1, 1, 256, 219, 219, 219, 147,,, +7, 4, 7, 4, 1, 1, 1, 1, 384, 219, 219, 219, 147,,, +7, 5, 1, 1, 1, 1, 1, 1, 128, 58, 58, 40, 22,,, +7, 5, 1, 1, 1, 1, 1, 1, 256, 58, 58, 40, 22,,, +7, 5, 1, 1, 1, 1, 1, 1, 384, 58, 58, 40, 22,,, +7, 5, 1, 2, 1, 1, 1, 1, 128, 112, 112, 76, 40,,, +7, 5, 1, 2, 1, 1, 1, 1, 256, 112, 112, 76, 40,,, +7, 5, 1, 2, 1, 1, 1, 1, 384, 112, 112, 76, 40,,, +7, 5, 1, 3, 1, 1, 1, 1, 128, 166, 166, 112, 58,,, +7, 5, 1, 3, 1, 1, 1, 1, 256, 166, 166, 112, 58,,, +7, 5, 1, 3, 1, 1, 1, 1, 384, 166, 166, 112, 58,,, +7, 5, 1, 4, 1, 1, 1, 1, 128, 
220, 220, 148, 76,,, +7, 5, 1, 4, 1, 1, 1, 1, 256, 220, 220, 148, 76,,, +7, 5, 1, 4, 1, 1, 1, 1, 384, 220, 220, 148, 76,,, +7, 5, 1, 5, 1, 1, 1, 1, 128, 240, 240, 184, 94,,, +7, 5, 1, 5, 1, 1, 1, 1, 256, 240, 240, 184, 94,,, +7, 5, 1, 5, 1, 1, 1, 1, 384, 240, 240, 184, 94,,, +7, 5, 2, 1, 1, 1, 1, 1, 128, 58, 58, 40, 22,,, +7, 5, 2, 1, 1, 1, 1, 1, 256, 58, 58, 40, 22,,, +7, 5, 2, 1, 1, 1, 1, 1, 384, 58, 58, 40, 22,,, +7, 5, 2, 2, 1, 1, 1, 1, 128, 112, 112, 76, 40,,, +7, 5, 2, 2, 1, 1, 1, 1, 256, 112, 112, 76, 40,,, +7, 5, 2, 2, 1, 1, 1, 1, 384, 112, 112, 76, 40,,, +7, 5, 2, 3, 1, 1, 1, 1, 128, 166, 166, 112, 58,,, +7, 5, 2, 3, 1, 1, 1, 1, 256, 166, 166, 112, 58,,, +7, 5, 2, 3, 1, 1, 1, 1, 384, 166, 166, 112, 58,,, +7, 5, 2, 4, 1, 1, 1, 1, 128, 220, 220, 148, 76,,, +7, 5, 2, 4, 1, 1, 1, 1, 256, 220, 220, 148, 76,,, +7, 5, 2, 4, 1, 1, 1, 1, 384, 220, 220, 148, 76,,, +7, 5, 2, 5, 1, 1, 1, 1, 128, 240, 240, 184, 94,,, +7, 5, 2, 5, 1, 1, 1, 1, 256, 240, 240, 184, 94,,, +7, 5, 2, 5, 1, 1, 1, 1, 384, 240, 240, 184, 94,,, +7, 5, 3, 1, 1, 1, 1, 1, 128, 58, 58, 40, 22,,, +7, 5, 3, 1, 1, 1, 1, 1, 256, 58, 58, 40, 22,,, +7, 5, 3, 1, 1, 1, 1, 1, 384, 58, 58, 40, 22,,, +7, 5, 3, 2, 1, 1, 1, 1, 128, 112, 112, 76, 40,,, +7, 5, 3, 2, 1, 1, 1, 1, 256, 112, 112, 76, 40,,, +7, 5, 3, 2, 1, 1, 1, 1, 384, 112, 112, 76, 40,,, +7, 5, 3, 3, 1, 1, 1, 1, 128, 166, 166, 112, 58,,, +7, 5, 3, 3, 1, 1, 1, 1, 256, 166, 166, 112, 58,,, +7, 5, 3, 3, 1, 1, 1, 1, 384, 166, 166, 112, 58,,, +7, 5, 3, 4, 1, 1, 1, 1, 128, 220, 220, 148, 76,,, +7, 5, 3, 4, 1, 1, 1, 1, 256, 220, 220, 148, 76,,, +7, 5, 3, 4, 1, 1, 1, 1, 384, 220, 220, 148, 76,,, +7, 5, 3, 5, 1, 1, 1, 1, 128, 240, 240, 184, 94,,, +7, 5, 3, 5, 1, 1, 1, 1, 256, 240, 240, 184, 94,,, +7, 5, 3, 5, 1, 1, 1, 1, 384, 240, 240, 184, 94,,, +7, 5, 4, 1, 1, 1, 1, 1, 128, 58, 58, 40, 22,,, +7, 5, 4, 1, 1, 1, 1, 1, 256, 58, 58, 40, 22,,, +7, 5, 4, 1, 1, 1, 1, 1, 384, 58, 58, 40, 22,,, +7, 5, 4, 2, 1, 1, 1, 1, 128, 112, 112, 76, 40,,, +7, 5, 4, 2, 1, 1, 1, 
1, 256, 112, 112, 76, 40,,, +7, 5, 4, 2, 1, 1, 1, 1, 384, 112, 112, 76, 40,,, +7, 5, 4, 3, 1, 1, 1, 1, 128, 166, 166, 112, 58,,, +7, 5, 4, 3, 1, 1, 1, 1, 256, 166, 166, 112, 58,,, +7, 5, 4, 3, 1, 1, 1, 1, 384, 166, 166, 112, 58,,, +7, 5, 4, 4, 1, 1, 1, 1, 128, 220, 220, 148, 76,,, +7, 5, 4, 4, 1, 1, 1, 1, 256, 220, 220, 148, 76,,, +7, 5, 4, 4, 1, 1, 1, 1, 384, 220, 220, 148, 76,,, +7, 5, 4, 5, 1, 1, 1, 1, 128, 240, 240, 184, 94,,, +7, 5, 4, 5, 1, 1, 1, 1, 256, 240, 240, 184, 94,,, +7, 5, 4, 5, 1, 1, 1, 1, 384, 240, 240, 184, 94,,, +7, 5, 5, 1, 1, 1, 1, 1, 128, 58, 58, 40, 22,,, +7, 5, 5, 1, 1, 1, 1, 1, 256, 58, 58, 40, 22,,, +7, 5, 5, 1, 1, 1, 1, 1, 384, 58, 58, 40, 22,,, +7, 5, 5, 2, 1, 1, 1, 1, 128, 112, 112, 76, 40,,, +7, 5, 5, 2, 1, 1, 1, 1, 256, 112, 112, 76, 40,,, +7, 5, 5, 2, 1, 1, 1, 1, 384, 112, 112, 76, 40,,, +7, 5, 5, 3, 1, 1, 1, 1, 128, 166, 166, 112, 58,,, +7, 5, 5, 3, 1, 1, 1, 1, 256, 166, 166, 112, 58,,, +7, 5, 5, 3, 1, 1, 1, 1, 384, 166, 166, 112, 58,,, +7, 5, 5, 4, 1, 1, 1, 1, 128, 220, 220, 148, 76,,, +7, 5, 5, 4, 1, 1, 1, 1, 256, 220, 220, 148, 76,,, +7, 5, 5, 4, 1, 1, 1, 1, 384, 220, 220, 148, 76,,, +7, 5, 5, 5, 1, 1, 1, 1, 128, 240, 240, 184, 94,,, +7, 5, 5, 5, 1, 1, 1, 1, 256, 240, 240, 184, 94,,, +7, 5, 5, 5, 1, 1, 1, 1, 384, 240, 240, 184, 94,,, +7, 5, 6, 1, 1, 1, 1, 1, 128, 58, 58, 40, 22,,, +7, 5, 6, 1, 1, 1, 1, 1, 256, 58, 58, 40, 22,,, +7, 5, 6, 1, 1, 1, 1, 1, 384, 58, 58, 40, 22,,, +7, 5, 6, 2, 1, 1, 1, 1, 128, 112, 112, 76, 40,,, +7, 5, 6, 2, 1, 1, 1, 1, 256, 112, 112, 76, 40,,, +7, 5, 6, 2, 1, 1, 1, 1, 384, 112, 112, 76, 40,,, +7, 5, 6, 3, 1, 1, 1, 1, 128, 166, 166, 112, 58,,, +7, 5, 6, 3, 1, 1, 1, 1, 256, 166, 166, 112, 58,,, +7, 5, 6, 3, 1, 1, 1, 1, 384, 166, 166, 112, 58,,, +7, 5, 6, 4, 1, 1, 1, 1, 128, 220, 220, 148, 76,,, +7, 5, 6, 4, 1, 1, 1, 1, 256, 220, 220, 148, 76,,, +7, 5, 6, 4, 1, 1, 1, 1, 384, 220, 220, 148, 76,,, +7, 5, 6, 5, 1, 1, 1, 1, 128, 240, 240, 184, 94,,, +7, 5, 6, 5, 1, 1, 1, 1, 256, 240, 240, 184, 94,,, +7, 5, 
6, 5, 1, 1, 1, 1, 384, 240, 240, 184, 94,,, +7, 5, 7, 1, 1, 1, 1, 1, 128, 58, 58, 40, 22,,, +7, 5, 7, 1, 1, 1, 1, 1, 256, 58, 58, 40, 22,,, +7, 5, 7, 1, 1, 1, 1, 1, 384, 58, 58, 40, 22,,, +7, 5, 7, 2, 1, 1, 1, 1, 128, 112, 112, 76, 40,,, +7, 5, 7, 2, 1, 1, 1, 1, 256, 112, 112, 76, 40,,, +7, 5, 7, 2, 1, 1, 1, 1, 384, 112, 112, 76, 40,,, +7, 5, 7, 3, 1, 1, 1, 1, 128, 166, 166, 112, 58,,, +7, 5, 7, 3, 1, 1, 1, 1, 256, 166, 166, 112, 58,,, +7, 5, 7, 3, 1, 1, 1, 1, 384, 166, 166, 112, 58,,, +7, 5, 7, 4, 1, 1, 1, 1, 128, 220, 220, 148, 76,,, +7, 5, 7, 4, 1, 1, 1, 1, 256, 220, 220, 148, 76,,, +7, 5, 7, 4, 1, 1, 1, 1, 384, 220, 220, 148, 76,,, +7, 5, 7, 5, 1, 1, 1, 1, 128, 240, 240, 184, 94,,, +7, 5, 7, 5, 1, 1, 1, 1, 256, 240, 240, 184, 94,,, +7, 5, 7, 5, 1, 1, 1, 1, 384, 240, 240, 184, 94,,, +7, 6, 1, 1, 1, 1, 1, 1, 128, 59, 59, 41, 23,,, +7, 6, 1, 1, 1, 1, 1, 1, 256, 59, 59, 41, 23,,, +7, 6, 1, 1, 1, 1, 1, 1, 384, 59, 59, 41, 23,,, +7, 6, 1, 2, 1, 1, 1, 1, 128, 113, 113, 77, 41,,, +7, 6, 1, 2, 1, 1, 1, 1, 256, 113, 113, 77, 41,,, +7, 6, 1, 2, 1, 1, 1, 1, 384, 113, 113, 77, 41,,, +7, 6, 1, 3, 1, 1, 1, 1, 128, 167, 167, 113, 59,,, +7, 6, 1, 3, 1, 1, 1, 1, 256, 167, 167, 113, 59,,, +7, 6, 1, 3, 1, 1, 1, 1, 384, 167, 167, 113, 59,,, +7, 6, 1, 4, 1, 1, 1, 1, 128, 221, 221, 149, 77,,, +7, 6, 1, 4, 1, 1, 1, 1, 256, 221, 221, 149, 77,,, +7, 6, 1, 4, 1, 1, 1, 1, 384, 221, 221, 149, 77,,, +7, 6, 1, 5, 1, 1, 1, 1, 128, 240, 240, 185, 95,,, +7, 6, 1, 5, 1, 1, 1, 1, 256, 240, 240, 185, 95,,, +7, 6, 1, 5, 1, 1, 1, 1, 384, 240, 240, 185, 95,,, +7, 6, 1, 6, 1, 1, 1, 1, 128, 240, 240, 221, 113,,, +7, 6, 1, 6, 1, 1, 1, 1, 256, 240, 240, 221, 113,,, +7, 6, 1, 6, 1, 1, 1, 1, 384, 240, 240, 221, 113,,, +7, 6, 2, 1, 1, 1, 1, 1, 128, 59, 59, 41, 23,,, +7, 6, 2, 1, 1, 1, 1, 1, 256, 59, 59, 41, 23,,, +7, 6, 2, 1, 1, 1, 1, 1, 384, 59, 59, 41, 23,,, +7, 6, 2, 2, 1, 1, 1, 1, 128, 113, 113, 77, 41,,, +7, 6, 2, 2, 1, 1, 1, 1, 256, 113, 113, 77, 41,,, +7, 6, 2, 2, 1, 1, 1, 1, 384, 113, 113, 77, 41,,, 
+7, 6, 2, 3, 1, 1, 1, 1, 128, 167, 167, 113, 59,,, +7, 6, 2, 3, 1, 1, 1, 1, 256, 167, 167, 113, 59,,, +7, 6, 2, 3, 1, 1, 1, 1, 384, 167, 167, 113, 59,,, +7, 6, 2, 4, 1, 1, 1, 1, 128, 221, 221, 149, 77,,, +7, 6, 2, 4, 1, 1, 1, 1, 256, 221, 221, 149, 77,,, +7, 6, 2, 4, 1, 1, 1, 1, 384, 221, 221, 149, 77,,, +7, 6, 2, 5, 1, 1, 1, 1, 128, 240, 240, 185, 95,,, +7, 6, 2, 5, 1, 1, 1, 1, 256, 240, 240, 185, 95,,, +7, 6, 2, 5, 1, 1, 1, 1, 384, 240, 240, 185, 95,,, +7, 6, 2, 6, 1, 1, 1, 1, 128, 240, 240, 221, 113,,, +7, 6, 2, 6, 1, 1, 1, 1, 256, 240, 240, 221, 113,,, +7, 6, 2, 6, 1, 1, 1, 1, 384, 240, 240, 221, 113,,, +7, 6, 3, 1, 1, 1, 1, 1, 128, 59, 59, 41, 23,,, +7, 6, 3, 1, 1, 1, 1, 1, 256, 59, 59, 41, 23,,, +7, 6, 3, 1, 1, 1, 1, 1, 384, 59, 59, 41, 23,,, +7, 6, 3, 2, 1, 1, 1, 1, 128, 113, 113, 77, 41,,, +7, 6, 3, 2, 1, 1, 1, 1, 256, 113, 113, 77, 41,,, +7, 6, 3, 2, 1, 1, 1, 1, 384, 113, 113, 77, 41,,, +7, 6, 3, 3, 1, 1, 1, 1, 128, 167, 167, 113, 59,,, +7, 6, 3, 3, 1, 1, 1, 1, 256, 167, 167, 113, 59,,, +7, 6, 3, 3, 1, 1, 1, 1, 384, 167, 167, 113, 59,,, +7, 6, 3, 4, 1, 1, 1, 1, 128, 221, 221, 149, 77,,, +7, 6, 3, 4, 1, 1, 1, 1, 256, 221, 221, 149, 77,,, +7, 6, 3, 4, 1, 1, 1, 1, 384, 221, 221, 149, 77,,, +7, 6, 3, 5, 1, 1, 1, 1, 128, 240, 240, 185, 95,,, +7, 6, 3, 5, 1, 1, 1, 1, 256, 240, 240, 185, 95,,, +7, 6, 3, 5, 1, 1, 1, 1, 384, 240, 240, 185, 95,,, +7, 6, 3, 6, 1, 1, 1, 1, 128, 240, 240, 221, 113,,, +7, 6, 3, 6, 1, 1, 1, 1, 256, 240, 240, 221, 113,,, +7, 6, 3, 6, 1, 1, 1, 1, 384, 240, 240, 221, 113,,, +7, 6, 4, 1, 1, 1, 1, 1, 128, 59, 59, 41, 23,,, +7, 6, 4, 1, 1, 1, 1, 1, 256, 59, 59, 41, 23,,, +7, 6, 4, 1, 1, 1, 1, 1, 384, 59, 59, 41, 23,,, +7, 6, 4, 2, 1, 1, 1, 1, 128, 113, 113, 77, 41,,, +7, 6, 4, 2, 1, 1, 1, 1, 256, 113, 113, 77, 41,,, +7, 6, 4, 2, 1, 1, 1, 1, 384, 113, 113, 77, 41,,, +7, 6, 4, 3, 1, 1, 1, 1, 128, 167, 167, 113, 59,,, +7, 6, 4, 3, 1, 1, 1, 1, 256, 167, 167, 113, 59,,, +7, 6, 4, 3, 1, 1, 1, 1, 384, 167, 167, 113, 59,,, +7, 6, 4, 4, 1, 1, 1, 1, 
128, 221, 221, 149, 77,,, +7, 6, 4, 4, 1, 1, 1, 1, 256, 221, 221, 149, 77,,, +7, 6, 4, 4, 1, 1, 1, 1, 384, 221, 221, 149, 77,,, +7, 6, 4, 5, 1, 1, 1, 1, 128, 240, 240, 185, 95,,, +7, 6, 4, 5, 1, 1, 1, 1, 256, 240, 240, 185, 95,,, +7, 6, 4, 5, 1, 1, 1, 1, 384, 240, 240, 185, 95,,, +7, 6, 4, 6, 1, 1, 1, 1, 128, 240, 240, 221, 113,,, +7, 6, 4, 6, 1, 1, 1, 1, 256, 240, 240, 221, 113,,, +7, 6, 4, 6, 1, 1, 1, 1, 384, 240, 240, 221, 113,,, +7, 6, 5, 1, 1, 1, 1, 1, 128, 59, 59, 41, 23,,, +7, 6, 5, 1, 1, 1, 1, 1, 256, 59, 59, 41, 23,,, +7, 6, 5, 1, 1, 1, 1, 1, 384, 59, 59, 41, 23,,, +7, 6, 5, 2, 1, 1, 1, 1, 128, 113, 113, 77, 41,,, +7, 6, 5, 2, 1, 1, 1, 1, 256, 113, 113, 77, 41,,, +7, 6, 5, 2, 1, 1, 1, 1, 384, 113, 113, 77, 41,,, +7, 6, 5, 3, 1, 1, 1, 1, 128, 167, 167, 113, 59,,, +7, 6, 5, 3, 1, 1, 1, 1, 256, 167, 167, 113, 59,,, +7, 6, 5, 3, 1, 1, 1, 1, 384, 167, 167, 113, 59,,, +7, 6, 5, 4, 1, 1, 1, 1, 128, 221, 221, 149, 77,,, +7, 6, 5, 4, 1, 1, 1, 1, 256, 221, 221, 149, 77,,, +7, 6, 5, 4, 1, 1, 1, 1, 384, 221, 221, 149, 77,,, +7, 6, 5, 5, 1, 1, 1, 1, 128, 240, 240, 185, 95,,, +7, 6, 5, 5, 1, 1, 1, 1, 256, 240, 240, 185, 95,,, +7, 6, 5, 5, 1, 1, 1, 1, 384, 240, 240, 185, 95,,, +7, 6, 5, 6, 1, 1, 1, 1, 128, 240, 240, 221, 113,,, +7, 6, 5, 6, 1, 1, 1, 1, 256, 240, 240, 221, 113,,, +7, 6, 5, 6, 1, 1, 1, 1, 384, 240, 240, 221, 113,,, +7, 6, 6, 1, 1, 1, 1, 1, 128, 59, 59, 41, 23,,, +7, 6, 6, 1, 1, 1, 1, 1, 256, 59, 59, 41, 23,,, +7, 6, 6, 1, 1, 1, 1, 1, 384, 59, 59, 41, 23,,, +7, 6, 6, 2, 1, 1, 1, 1, 128, 113, 113, 77, 41,,, +7, 6, 6, 2, 1, 1, 1, 1, 256, 113, 113, 77, 41,,, +7, 6, 6, 2, 1, 1, 1, 1, 384, 113, 113, 77, 41,,, +7, 6, 6, 3, 1, 1, 1, 1, 128, 167, 167, 113, 59,,, +7, 6, 6, 3, 1, 1, 1, 1, 256, 167, 167, 113, 59,,, +7, 6, 6, 3, 1, 1, 1, 1, 384, 167, 167, 113, 59,,, +7, 6, 6, 4, 1, 1, 1, 1, 128, 221, 221, 149, 77,,, +7, 6, 6, 4, 1, 1, 1, 1, 256, 221, 221, 149, 77,,, +7, 6, 6, 4, 1, 1, 1, 1, 384, 221, 221, 149, 77,,, +7, 6, 6, 5, 1, 1, 1, 1, 128, 240, 240, 185, 95,,, 
+7, 6, 6, 5, 1, 1, 1, 1, 256, 240, 240, 185, 95,,, +7, 6, 6, 5, 1, 1, 1, 1, 384, 240, 240, 185, 95,,, +7, 6, 6, 6, 1, 1, 1, 1, 128, 240, 240, 221, 113,,, +7, 6, 6, 6, 1, 1, 1, 1, 256, 240, 240, 221, 113,,, +7, 6, 6, 6, 1, 1, 1, 1, 384, 240, 240, 221, 113,,, +7, 6, 7, 1, 1, 1, 1, 1, 128, 59, 59, 41, 23,,, +7, 6, 7, 1, 1, 1, 1, 1, 256, 59, 59, 41, 23,,, +7, 6, 7, 1, 1, 1, 1, 1, 384, 59, 59, 41, 23,,, +7, 6, 7, 2, 1, 1, 1, 1, 128, 113, 113, 77, 41,,, +7, 6, 7, 2, 1, 1, 1, 1, 256, 113, 113, 77, 41,,, +7, 6, 7, 2, 1, 1, 1, 1, 384, 113, 113, 77, 41,,, +7, 6, 7, 3, 1, 1, 1, 1, 128, 167, 167, 113, 59,,, +7, 6, 7, 3, 1, 1, 1, 1, 256, 167, 167, 113, 59,,, +7, 6, 7, 3, 1, 1, 1, 1, 384, 167, 167, 113, 59,,, +7, 6, 7, 4, 1, 1, 1, 1, 128, 221, 221, 149, 77,,, +7, 6, 7, 4, 1, 1, 1, 1, 256, 221, 221, 149, 77,,, +7, 6, 7, 4, 1, 1, 1, 1, 384, 221, 221, 149, 77,,, +7, 6, 7, 5, 1, 1, 1, 1, 128, 240, 240, 185, 95,,, +7, 6, 7, 5, 1, 1, 1, 1, 256, 240, 240, 185, 95,,, +7, 6, 7, 5, 1, 1, 1, 1, 384, 240, 240, 185, 95,,, +7, 6, 7, 6, 1, 1, 1, 1, 128, 240, 240, 221, 113,,, +7, 6, 7, 6, 1, 1, 1, 1, 256, 240, 240, 221, 113,,, +7, 6, 7, 6, 1, 1, 1, 1, 384, 240, 240, 221, 113,,, +7, 7, 1, 1, 1, 1, 1, 1, 128, 60, 60, 42,,,, +7, 7, 1, 1, 1, 1, 1, 1, 256, 60, 60, 42,,,, +7, 7, 1, 1, 1, 1, 1, 1, 384, 60, 60, 42,,,, +7, 7, 1, 2, 1, 1, 1, 1, 128, 114, 114, 78,,,, +7, 7, 1, 2, 1, 1, 1, 1, 256, 114, 114, 78,,,, +7, 7, 1, 2, 1, 1, 1, 1, 384, 114, 114, 78,,,, +7, 7, 1, 3, 1, 1, 1, 1, 128, 168, 168, 114,,,, +7, 7, 1, 3, 1, 1, 1, 1, 256, 168, 168, 114,,,, +7, 7, 1, 3, 1, 1, 1, 1, 384, 168, 168, 114,,,, +7, 7, 1, 4, 1, 1, 1, 1, 128, 222, 222, 150,,,, +7, 7, 1, 4, 1, 1, 1, 1, 256, 222, 222, 150,,,, +7, 7, 1, 4, 1, 1, 1, 1, 384, 222, 222, 150,,,, +7, 7, 1, 5, 1, 1, 1, 1, 128, 240, 240, 186,,,, +7, 7, 1, 5, 1, 1, 1, 1, 256, 240, 240, 186,,,, +7, 7, 1, 5, 1, 1, 1, 1, 384, 240, 240, 186,,,, +7, 7, 1, 6, 1, 1, 1, 1, 128, 240, 240, 222,,,, +7, 7, 1, 6, 1, 1, 1, 1, 256, 240, 240, 222,,,, +7, 7, 1, 6, 1, 1, 1, 1, 
384, 240, 240, 222,,,, +7, 7, 1, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +7, 7, 1, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +7, 7, 1, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +7, 7, 2, 1, 1, 1, 1, 1, 128, 60, 60, 42,,,, +7, 7, 2, 1, 1, 1, 1, 1, 256, 60, 60, 42,,,, +7, 7, 2, 1, 1, 1, 1, 1, 384, 60, 60, 42,,,, +7, 7, 2, 2, 1, 1, 1, 1, 128, 114, 114, 78,,,, +7, 7, 2, 2, 1, 1, 1, 1, 256, 114, 114, 78,,,, +7, 7, 2, 2, 1, 1, 1, 1, 384, 114, 114, 78,,,, +7, 7, 2, 3, 1, 1, 1, 1, 128, 168, 168, 114,,,, +7, 7, 2, 3, 1, 1, 1, 1, 256, 168, 168, 114,,,, +7, 7, 2, 3, 1, 1, 1, 1, 384, 168, 168, 114,,,, +7, 7, 2, 4, 1, 1, 1, 1, 128, 222, 222, 150,,,, +7, 7, 2, 4, 1, 1, 1, 1, 256, 222, 222, 150,,,, +7, 7, 2, 4, 1, 1, 1, 1, 384, 222, 222, 150,,,, +7, 7, 2, 5, 1, 1, 1, 1, 128, 240, 240, 186,,,, +7, 7, 2, 5, 1, 1, 1, 1, 256, 240, 240, 186,,,, +7, 7, 2, 5, 1, 1, 1, 1, 384, 240, 240, 186,,,, +7, 7, 2, 6, 1, 1, 1, 1, 128, 240, 240, 222,,,, +7, 7, 2, 6, 1, 1, 1, 1, 256, 240, 240, 222,,,, +7, 7, 2, 6, 1, 1, 1, 1, 384, 240, 240, 222,,,, +7, 7, 2, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +7, 7, 2, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +7, 7, 2, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +7, 7, 3, 1, 1, 1, 1, 1, 128, 60, 60, 42,,,, +7, 7, 3, 1, 1, 1, 1, 1, 256, 60, 60, 42,,,, +7, 7, 3, 1, 1, 1, 1, 1, 384, 60, 60, 42,,,, +7, 7, 3, 2, 1, 1, 1, 1, 128, 114, 114, 78,,,, +7, 7, 3, 2, 1, 1, 1, 1, 256, 114, 114, 78,,,, +7, 7, 3, 2, 1, 1, 1, 1, 384, 114, 114, 78,,,, +7, 7, 3, 3, 1, 1, 1, 1, 128, 168, 168, 114,,,, +7, 7, 3, 3, 1, 1, 1, 1, 256, 168, 168, 114,,,, +7, 7, 3, 3, 1, 1, 1, 1, 384, 168, 168, 114,,,, +7, 7, 3, 4, 1, 1, 1, 1, 128, 222, 222, 150,,,, +7, 7, 3, 4, 1, 1, 1, 1, 256, 222, 222, 150,,,, +7, 7, 3, 4, 1, 1, 1, 1, 384, 222, 222, 150,,,, +7, 7, 3, 5, 1, 1, 1, 1, 128, 240, 240, 186,,,, +7, 7, 3, 5, 1, 1, 1, 1, 256, 240, 240, 186,,,, +7, 7, 3, 5, 1, 1, 1, 1, 384, 240, 240, 186,,,, +7, 7, 3, 6, 1, 1, 1, 1, 128, 240, 240, 222,,,, +7, 7, 3, 6, 1, 1, 1, 1, 256, 240, 240, 222,,,, +7, 7, 3, 6, 1, 1, 1, 1, 384, 
240, 240, 222,,,, +7, 7, 3, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +7, 7, 3, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +7, 7, 3, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +7, 7, 4, 1, 1, 1, 1, 1, 128, 60, 60, 42,,,, +7, 7, 4, 1, 1, 1, 1, 1, 256, 60, 60, 42,,,, +7, 7, 4, 1, 1, 1, 1, 1, 384, 60, 60, 42,,,, +7, 7, 4, 2, 1, 1, 1, 1, 128, 114, 114, 78,,,, +7, 7, 4, 2, 1, 1, 1, 1, 256, 114, 114, 78,,,, +7, 7, 4, 2, 1, 1, 1, 1, 384, 114, 114, 78,,,, +7, 7, 4, 3, 1, 1, 1, 1, 128, 168, 168, 114,,,, +7, 7, 4, 3, 1, 1, 1, 1, 256, 168, 168, 114,,,, +7, 7, 4, 3, 1, 1, 1, 1, 384, 168, 168, 114,,,, +7, 7, 4, 4, 1, 1, 1, 1, 128, 222, 222, 150,,,, +7, 7, 4, 4, 1, 1, 1, 1, 256, 222, 222, 150,,,, +7, 7, 4, 4, 1, 1, 1, 1, 384, 222, 222, 150,,,, +7, 7, 4, 5, 1, 1, 1, 1, 128, 240, 240, 186,,,, +7, 7, 4, 5, 1, 1, 1, 1, 256, 240, 240, 186,,,, +7, 7, 4, 5, 1, 1, 1, 1, 384, 240, 240, 186,,,, +7, 7, 4, 6, 1, 1, 1, 1, 128, 240, 240, 222,,,, +7, 7, 4, 6, 1, 1, 1, 1, 256, 240, 240, 222,,,, +7, 7, 4, 6, 1, 1, 1, 1, 384, 240, 240, 222,,,, +7, 7, 4, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +7, 7, 4, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +7, 7, 4, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +7, 7, 5, 1, 1, 1, 1, 1, 128, 60, 60, 42,,,, +7, 7, 5, 1, 1, 1, 1, 1, 256, 60, 60, 42,,,, +7, 7, 5, 1, 1, 1, 1, 1, 384, 60, 60, 42,,,, +7, 7, 5, 2, 1, 1, 1, 1, 128, 114, 114, 78,,,, +7, 7, 5, 2, 1, 1, 1, 1, 256, 114, 114, 78,,,, +7, 7, 5, 2, 1, 1, 1, 1, 384, 114, 114, 78,,,, +7, 7, 5, 3, 1, 1, 1, 1, 128, 168, 168, 114,,,, +7, 7, 5, 3, 1, 1, 1, 1, 256, 168, 168, 114,,,, +7, 7, 5, 3, 1, 1, 1, 1, 384, 168, 168, 114,,,, +7, 7, 5, 4, 1, 1, 1, 1, 128, 222, 222, 150,,,, +7, 7, 5, 4, 1, 1, 1, 1, 256, 222, 222, 150,,,, +7, 7, 5, 4, 1, 1, 1, 1, 384, 222, 222, 150,,,, +7, 7, 5, 5, 1, 1, 1, 1, 128, 240, 240, 186,,,, +7, 7, 5, 5, 1, 1, 1, 1, 256, 240, 240, 186,,,, +7, 7, 5, 5, 1, 1, 1, 1, 384, 240, 240, 186,,,, +7, 7, 5, 6, 1, 1, 1, 1, 128, 240, 240, 222,,,, +7, 7, 5, 6, 1, 1, 1, 1, 256, 240, 240, 222,,,, +7, 7, 5, 6, 1, 1, 1, 1, 384, 240, 
240, 222,,,, +7, 7, 5, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +7, 7, 5, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +7, 7, 5, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +7, 7, 6, 1, 1, 1, 1, 1, 128, 60, 60, 42,,,, +7, 7, 6, 1, 1, 1, 1, 1, 256, 60, 60, 42,,,, +7, 7, 6, 1, 1, 1, 1, 1, 384, 60, 60, 42,,,, +7, 7, 6, 2, 1, 1, 1, 1, 128, 114, 114, 78,,,, +7, 7, 6, 2, 1, 1, 1, 1, 256, 114, 114, 78,,,, +7, 7, 6, 2, 1, 1, 1, 1, 384, 114, 114, 78,,,, +7, 7, 6, 3, 1, 1, 1, 1, 128, 168, 168, 114,,,, +7, 7, 6, 3, 1, 1, 1, 1, 256, 168, 168, 114,,,, +7, 7, 6, 3, 1, 1, 1, 1, 384, 168, 168, 114,,,, +7, 7, 6, 4, 1, 1, 1, 1, 128, 222, 222, 150,,,, +7, 7, 6, 4, 1, 1, 1, 1, 256, 222, 222, 150,,,, +7, 7, 6, 4, 1, 1, 1, 1, 384, 222, 222, 150,,,, +7, 7, 6, 5, 1, 1, 1, 1, 128, 240, 240, 186,,,, +7, 7, 6, 5, 1, 1, 1, 1, 256, 240, 240, 186,,,, +7, 7, 6, 5, 1, 1, 1, 1, 384, 240, 240, 186,,,, +7, 7, 6, 6, 1, 1, 1, 1, 128, 240, 240, 222,,,, +7, 7, 6, 6, 1, 1, 1, 1, 256, 240, 240, 222,,,, +7, 7, 6, 6, 1, 1, 1, 1, 384, 240, 240, 222,,,, +7, 7, 6, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +7, 7, 6, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +7, 7, 6, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,, +7, 7, 7, 1, 1, 1, 1, 1, 128, 60, 60, 42,,,, +7, 7, 7, 1, 1, 1, 1, 1, 256, 60, 60, 42,,,, +7, 7, 7, 1, 1, 1, 1, 1, 384, 60, 60, 42,,,, +7, 7, 7, 2, 1, 1, 1, 1, 128, 114, 114, 78,,,, +7, 7, 7, 2, 1, 1, 1, 1, 256, 114, 114, 78,,,, +7, 7, 7, 2, 1, 1, 1, 1, 384, 114, 114, 78,,,, +7, 7, 7, 3, 1, 1, 1, 1, 128, 168, 168, 114,,,, +7, 7, 7, 3, 1, 1, 1, 1, 256, 168, 168, 114,,,, +7, 7, 7, 3, 1, 1, 1, 1, 384, 168, 168, 114,,,, +7, 7, 7, 4, 1, 1, 1, 1, 128, 222, 222, 150,,,, +7, 7, 7, 4, 1, 1, 1, 1, 256, 222, 222, 150,,,, +7, 7, 7, 4, 1, 1, 1, 1, 384, 222, 222, 150,,,, +7, 7, 7, 5, 1, 1, 1, 1, 128, 240, 240, 186,,,, +7, 7, 7, 5, 1, 1, 1, 1, 256, 240, 240, 186,,,, +7, 7, 7, 5, 1, 1, 1, 1, 384, 240, 240, 186,,,, +7, 7, 7, 6, 1, 1, 1, 1, 128, 240, 240, 222,,,, +7, 7, 7, 6, 1, 1, 1, 1, 256, 240, 240, 222,,,, +7, 7, 7, 6, 1, 1, 1, 1, 384, 240, 240, 
222,,,, +7, 7, 7, 7, 1, 1, 1, 1, 128, 240, 240, 240,,,, +7, 7, 7, 7, 1, 1, 1, 1, 256, 240, 240, 240,,,, +7, 7, 7, 7, 1, 1, 1, 1, 384, 240, 240, 240,,,,, \ No newline at end of file diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css index 07533f91be3..763b9f7505b 100644 --- a/docs/_static/css/custom.css +++ b/docs/_static/css/custom.css @@ -49,7 +49,37 @@ main img { background-image: url('media/union-up.svg'); } +div.highlight { + margin-bottom: 1.15rem; +} + .highlight .err { border:none; color:inherit; } + +.opt-notice-wrapper { + position: fixed; + bottom:0; + background: black; + width:100%; + text-align: center; + padding: 1rem; + z-index: 1000; +} + +.opt-notice { + margin-bottom: 0; + position: absolute; + top: 50%; + transform: translateY(-50%); + text-align: center; + width:100%; + color: #fff; +} + +@media (min-width: 1200px) { + .container, .container-lg, .container-md, .container-sm, .container-xl { + max-width: 1800px; + } +} diff --git a/docs/_static/images/DL_WB_start.png b/docs/_static/images/DL_WB_start.png new file mode 100644 index 00000000000..322c4480219 --- /dev/null +++ b/docs/_static/images/DL_WB_start.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7e7dae1b0f7316d11d4d0459fc372d632e1b47edf0addcb90d26be7dc1492e21 +size 109620 diff --git a/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img01-localhost.png b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img01-localhost.png new file mode 100644 index 00000000000..c5b186c02e7 --- /dev/null +++ b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img01-localhost.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:36f4b9e0714e819b0c98a30f3c08d6ce1f9206906be42e80cb1fa746e6354ad6 +size 25333 diff --git a/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img02-launch.png b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img02-launch.png new file mode 100644 index 
00000000000..b00d9f0dcb8 --- /dev/null +++ b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img02-launch.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a5a0022bda018ae7e5261bbb9f5e8cc28374254c272dd3cbc2ab2f872381e2c5 +size 21106 diff --git a/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img03-hotspots.png b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img03-hotspots.png new file mode 100644 index 00000000000..dc1f7d7c0b1 --- /dev/null +++ b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img03-hotspots.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0319b56afe702fb09957f4d3c996155be79efc672edc0e780d34441eaa660b2c +size 43521 diff --git a/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img04-vtunesummary.png b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img04-vtunesummary.png new file mode 100644 index 00000000000..9769b6eb0e6 --- /dev/null +++ b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img04-vtunesummary.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:813e629fe674b676c8484a92f94d55ffc13cacc1e077fa19a2c82cd528819d72 +size 256217 diff --git a/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img05-vtunebottomup.png b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img05-vtunebottomup.png new file mode 100644 index 00000000000..feecb907c87 --- /dev/null +++ b/docs/_static/images/IE_DG_supported_plugins_AUTO_debugging-img05-vtunebottomup.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:62fb4b08191499cfe765f3777dd7ae543739232cfd8861e21f976ba09aa9797b +size 176560 diff --git a/docs/_static/images/accuracy_table_yolo.png b/docs/_static/images/accuracy_table_yolo.png new file mode 100644 index 00000000000..09c102cb935 --- /dev/null +++ b/docs/_static/images/accuracy_table_yolo.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:0d9d37e783ef8cf930f4009743cb604ff3180225f981d07f917c7f25846bbefe +size 125159 diff --git a/docs/_static/images/accuracy_table_yolo.png.png b/docs/_static/images/accuracy_table_yolo.png.png new file mode 100644 index 00000000000..81486c68bb5 --- /dev/null +++ b/docs/_static/images/accuracy_table_yolo.png.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7b11408aa0c5aba0a28615a585abda61b539cb198bf511297275949d6dee78e5 +size 115623 diff --git a/docs/_static/images/accuracy_table_yolo_advanced.png b/docs/_static/images/accuracy_table_yolo_advanced.png new file mode 100644 index 00000000000..cd154eb5af2 --- /dev/null +++ b/docs/_static/images/accuracy_table_yolo_advanced.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50bd093500582971fd9ad3c99556d739132680328adfa9e034d9093fb5a31023 +size 53183 diff --git a/docs/_static/images/accuracy_table_yolo_basic.png b/docs/_static/images/accuracy_table_yolo_basic.png new file mode 100644 index 00000000000..f5ddd43558a --- /dev/null +++ b/docs/_static/images/accuracy_table_yolo_basic.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c6f96b5460e218899068bb9bcaf4caae7d9b5827ac44ff5a6d7145dfb5a4a6a3 +size 43233 diff --git a/docs/_static/images/accuracy_yolov4.png b/docs/_static/images/accuracy_yolov4.png new file mode 100644 index 00000000000..281395e2c0b --- /dev/null +++ b/docs/_static/images/accuracy_yolov4.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e44a115c712032a30e705771a0f1a49221090c340901d0497b26d208fed6cc8c +size 110224 diff --git a/docs/_static/images/calibration_yolov4.png b/docs/_static/images/calibration_yolov4.png new file mode 100644 index 00000000000..1bc2e81db46 --- /dev/null +++ b/docs/_static/images/calibration_yolov4.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:351cf888e0ed3d5c6701db028523859c42faa91ac2ce95b7ee00eaa2db8d6ad5 +size 60247 diff --git 
a/docs/_static/images/check_yolo_model.png b/docs/_static/images/check_yolo_model.png new file mode 100644 index 00000000000..150b1b68c48 --- /dev/null +++ b/docs/_static/images/check_yolo_model.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2cf1cfc3cc38d25f6a46ff0cd2bc63ff1d93bb3e52b57c640f80a73e56f3c44 +size 489383 diff --git a/docs/_static/images/color_transformations.png b/docs/_static/images/color_transformations.png new file mode 100644 index 00000000000..08efcf7b536 --- /dev/null +++ b/docs/_static/images/color_transformations.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5e4d3d29a4cec7dfffa3c743336925ce565722f9950c8cd3f9c6b6bf75c38c53 +size 508773 diff --git a/docs/_static/images/config_filled.png b/docs/_static/images/config_filled.png new file mode 100644 index 00000000000..b6fc836ed72 --- /dev/null +++ b/docs/_static/images/config_filled.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a63fe316d6ee337ccafcf83ff088fe413b985793631d2cf7d4ff6d5f0c5391b +size 51962 diff --git a/docs/_static/images/configure_input.png b/docs/_static/images/configure_input.png new file mode 100644 index 00000000000..c6996203bbd --- /dev/null +++ b/docs/_static/images/configure_input.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:75196ce75c33936fb8b06bc27e7328b23cf08f066a6f996f6a413fee42aff6d6 +size 28423 diff --git a/docs/_static/images/convert_model_to_ir_general.png b/docs/_static/images/convert_model_to_ir_general.png new file mode 100644 index 00000000000..d08ae9b560d --- /dev/null +++ b/docs/_static/images/convert_model_to_ir_general.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cdf4769ec02cd5fb13335f11436c18151aaacbb0efe13b680ff8813ca3c2f997 +size 45839 diff --git a/docs/_static/images/convert_omz_to_IR.png b/docs/_static/images/convert_omz_to_IR.png new file mode 100644 index 00000000000..963666d2ab6 --- /dev/null +++ 
b/docs/_static/images/convert_omz_to_IR.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f0cd7ef06dc11c0f25409b2625869f6b725a8a004d9016a858b25c59dd01465 +size 19178 diff --git a/docs/_static/images/create_project.png b/docs/_static/images/create_project.png new file mode 100644 index 00000000000..9abf32f56da --- /dev/null +++ b/docs/_static/images/create_project.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de6f29d1c04a9affba1ca4f065f6d403001033a58b0cb586f1e39dc3701034c0 +size 83105 diff --git a/docs/_static/images/create_project_selected.png b/docs/_static/images/create_project_selected.png new file mode 100644 index 00000000000..ebd1deb909c --- /dev/null +++ b/docs/_static/images/create_project_selected.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6895aa172437708eda816bb50acce4307c58da3b85b095c7f3e3c1fc9f24140a +size 121292 diff --git a/docs/_static/images/create_project_yolo.png b/docs/_static/images/create_project_yolo.png new file mode 100644 index 00000000000..099c0b23660 --- /dev/null +++ b/docs/_static/images/create_project_yolo.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2ada7ee59b840e9402a06afa08deadb2b4c1ff6ecd7fca99e71a839699f3e4b +size 43098 diff --git a/docs/_static/images/create_report_yolo.png b/docs/_static/images/create_report_yolo.png new file mode 100644 index 00000000000..4c3f9884167 --- /dev/null +++ b/docs/_static/images/create_report_yolo.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e771bc373c56930e3fe6c8d2de69aa16fa92bb09cacbaf22e55c85f90674222 +size 57050 diff --git a/docs/_static/images/custom_dataset.png b/docs/_static/images/custom_dataset.png new file mode 100644 index 00000000000..5a11ef4c81f --- /dev/null +++ b/docs/_static/images/custom_dataset.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b8bcf5b310b6d2d6cf4a14635f82491ec4fa8f1efbf6f0485b4968d3f01e36ee +size 540797 diff --git a/docs/_static/images/custom_dataset_imported.png b/docs/_static/images/custom_dataset_imported.png new file mode 100644 index 00000000000..2497a10a381 --- /dev/null +++ b/docs/_static/images/custom_dataset_imported.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aed8f8a3d61e764b12cf8f1c99db3d4c42f17008d4b3367a36aefec761e7840b +size 19079 diff --git a/docs/_static/images/dataset.png b/docs/_static/images/dataset.png new file mode 100644 index 00000000000..960d4d032a7 --- /dev/null +++ b/docs/_static/images/dataset.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:caf1538ca8b64cbc243ab8e4a87b38a7eb071c2f19955fe881cd221807f485b7 +size 312545 diff --git a/docs/_static/images/dataset_import.png b/docs/_static/images/dataset_import.png new file mode 100644 index 00000000000..8495b48a5e8 --- /dev/null +++ b/docs/_static/images/dataset_import.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5bea2db4a7618e4370398d29358bffaad8bb9fe61316012b38ba54d305615a77 +size 37413 diff --git a/docs/_static/images/dataset_imported.png b/docs/_static/images/dataset_imported.png new file mode 100644 index 00000000000..8187b1a8c1a --- /dev/null +++ b/docs/_static/images/dataset_imported.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:974bdd3a7cfba238ec7647af349fbd5e291fb79f30f3ab337df274c338b6ee6e +size 20895 diff --git a/docs/_static/images/dataset_selection.png b/docs/_static/images/dataset_selection.png new file mode 100644 index 00000000000..64352e54679 --- /dev/null +++ b/docs/_static/images/dataset_selection.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f551cf331e3dd23598f80511b6b4be27829a578eca9aa8a83e63f70c743822bb +size 25656 diff --git a/docs/_static/images/detections_yolo_true.png b/docs/_static/images/detections_yolo_true.png new file mode 100644 
index 00000000000..94ba19fb8af --- /dev/null +++ b/docs/_static/images/detections_yolo_true.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bdd39272d00fd0d066c36c2750fd6af77fd7305d47650a1c4f1bee2a991a5e8b +size 314031 diff --git a/docs/_static/images/download_cola.png b/docs/_static/images/download_cola.png new file mode 100644 index 00000000000..35a493d5709 --- /dev/null +++ b/docs/_static/images/download_cola.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92055ab91793ece49fd47bb65037073f35d29a974fa03e1cc76a013cb79f3f8f +size 74276 diff --git a/docs/_static/images/generate_datasets_closeup.png b/docs/_static/images/generate_datasets_closeup.png new file mode 100644 index 00000000000..051a6f3a43d --- /dev/null +++ b/docs/_static/images/generate_datasets_closeup.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1bef9577a4ff23b7f102a314c90fffd167fab56b0f6a9492970e02de3a0de998 +size 399477 diff --git a/docs/_static/images/github.png b/docs/_static/images/github.png new file mode 100644 index 00000000000..4bf56a3d3e4 --- /dev/null +++ b/docs/_static/images/github.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7b2749d93dff16fc9062a0fa01fd694bf19385a0b4f3d0e409eb56f2648e3cfc +size 11929 diff --git a/docs/_static/images/horizontal_flip.png b/docs/_static/images/horizontal_flip.png new file mode 100644 index 00000000000..ff4dc034864 --- /dev/null +++ b/docs/_static/images/horizontal_flip.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f66d41137ef6cfd0fe4bd8f635814203bc963b546a20da9718b42f58b6fa4bd9 +size 168160 diff --git a/docs/_static/images/horizontal_flip_closeup.png b/docs/_static/images/horizontal_flip_closeup.png new file mode 100644 index 00000000000..1ba92fd8615 --- /dev/null +++ b/docs/_static/images/horizontal_flip_closeup.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:f57758f726846dc6f83dc14ed0f96622a7d527287330b6c6f8ac95cecfd30cbf +size 432513 diff --git a/docs/_static/images/import_annotated_dataset.png b/docs/_static/images/import_annotated_dataset.png new file mode 100644 index 00000000000..12da21ab981 --- /dev/null +++ b/docs/_static/images/import_annotated_dataset.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51c33a7b19d2d21bab642372efcbeccdbcb6f8813d98cfe8fb9c03f801287474 +size 38600 diff --git a/docs/_static/images/import_dataset_page.png b/docs/_static/images/import_dataset_page.png new file mode 100644 index 00000000000..18eac942c2f --- /dev/null +++ b/docs/_static/images/import_dataset_page.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3994e201f3077a11da2870ae6d13242d0d0d353d35f837cdd7e01686a445859d +size 266463 diff --git a/docs/_static/images/import_frameworks.png b/docs/_static/images/import_frameworks.png new file mode 100644 index 00000000000..b01cda8aba1 --- /dev/null +++ b/docs/_static/images/import_frameworks.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a70538daffd8c1b4180e5106ed2d3ba3fc21301b874fecae10a87d518b47dd7 +size 18404 diff --git a/docs/_static/images/import_image_dataset.png b/docs/_static/images/import_image_dataset.png new file mode 100644 index 00000000000..144a8dd4de7 --- /dev/null +++ b/docs/_static/images/import_image_dataset.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f055ebe36900ad24d4501d89f52e343f7c2b14a025e020b747c7272325d2c4a2 +size 10521 diff --git a/docs/_static/images/import_model.png b/docs/_static/images/import_model.png new file mode 100644 index 00000000000..34c266a1a69 --- /dev/null +++ b/docs/_static/images/import_model.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3e628143bbd02dc43ccff24e7f436f8f22fcffb3a606fdf65e49c18097741949 +size 50825 diff --git a/docs/_static/images/import_model_button.png 
b/docs/_static/images/import_model_button.png new file mode 100644 index 00000000000..ff42cad97e9 --- /dev/null +++ b/docs/_static/images/import_model_button.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93b002b735efaacd4b5a8c10115737082ba9f9b06696411d401225f52f3da9a0 +size 39891 diff --git a/docs/_static/images/import_model_nlp.png b/docs/_static/images/import_model_nlp.png new file mode 100644 index 00000000000..5e82e53a2ec --- /dev/null +++ b/docs/_static/images/import_model_nlp.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14e14c77fef928ecb0b53905667aff2a77a9b71b2c4160e7f3072e587cda4416 +size 39565 diff --git a/docs/_static/images/import_text_dataset.png b/docs/_static/images/import_text_dataset.png new file mode 100644 index 00000000000..23de3e9739f --- /dev/null +++ b/docs/_static/images/import_text_dataset.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fdec98af4d2a0f7a0b8665165184968fea3fa294938d40619572e285c70745dd +size 51976 diff --git a/docs/_static/images/import_tokenizer.png b/docs/_static/images/import_tokenizer.png new file mode 100644 index 00000000000..192c5a0823b --- /dev/null +++ b/docs/_static/images/import_tokenizer.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9f9b185567143692721ea68b0afd1c63d000df1d6b216c51567306d98bac5de8 +size 31986 diff --git a/docs/_static/images/import_yolov4.png b/docs/_static/images/import_yolov4.png new file mode 100644 index 00000000000..77c88ad2ac3 --- /dev/null +++ b/docs/_static/images/import_yolov4.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:94e10d4ec22ca3822faad9d0eea18661b3feec09611ae7dd19b64b8d636487cd +size 47182 diff --git a/docs/_static/images/input_name.png b/docs/_static/images/input_name.png new file mode 100644 index 00000000000..6ed2f94db7e --- /dev/null +++ b/docs/_static/images/input_name.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 
+oid sha256:dcdb6eac41fb75ae1248c7103046fb51298f0d0c08b52ff22cc5f8c81b394741 +size 15202 diff --git a/docs/_static/images/input_shape.png b/docs/_static/images/input_shape.png new file mode 100644 index 00000000000..d407c3b17c8 --- /dev/null +++ b/docs/_static/images/input_shape.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af7835134b35136d8b50e2b22944a88f2bcfb3ff114afdfd052d52b4358212b3 +size 22970 diff --git a/docs/_static/images/inputs.png b/docs/_static/images/inputs.png new file mode 100644 index 00000000000..d4518d9de79 --- /dev/null +++ b/docs/_static/images/inputs.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1f8bc12837e03b1a2c1386c2bac512c21b1fb073d990379079c317488e9ce1c +size 39015 diff --git a/docs/_static/images/layers_yolov3.png b/docs/_static/images/layers_yolov3.png new file mode 100644 index 00000000000..179c825f5b1 --- /dev/null +++ b/docs/_static/images/layers_yolov3.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:172f332868ccc10f938c5cf2eafd6380a5eda87b7bd11d5df34da8316173f01a +size 23048 diff --git a/docs/_static/images/layers_yolov4.png b/docs/_static/images/layers_yolov4.png new file mode 100644 index 00000000000..ac73a77742d --- /dev/null +++ b/docs/_static/images/layers_yolov4.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc69087823f95bfd95bc0ed453b669232bfd80efd0045720cc7696a2a9a795bf +size 22431 diff --git a/docs/_static/images/means_scales.png b/docs/_static/images/means_scales.png new file mode 100644 index 00000000000..6695c64933d --- /dev/null +++ b/docs/_static/images/means_scales.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c0b876337f2c094325d075db980ca8d010860485950cbb9001f948e060465377 +size 10957 diff --git a/docs/_static/images/model_analysis.png b/docs/_static/images/model_analysis.png new file mode 100644 index 00000000000..55f0323e314 --- /dev/null +++ 
b/docs/_static/images/model_analysis.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9640f8e9260a4ea796b7768de3368762719644319cfdad332fe1a8dc79946a4c +size 49674 diff --git a/docs/_static/images/model_analysis_details.png b/docs/_static/images/model_analysis_details.png new file mode 100644 index 00000000000..975b8714506 --- /dev/null +++ b/docs/_static/images/model_analysis_details.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e02a91f6ae4a94f7a5bc08987b2bd5a239d13dd80ae485189f79a2ee30bc5528 +size 13546 diff --git a/docs/_static/images/noise_injection.png b/docs/_static/images/noise_injection.png new file mode 100644 index 00000000000..3011823cf99 --- /dev/null +++ b/docs/_static/images/noise_injection.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0cd558b8355e04507aedc1b18ca8075e74d934d311320ea25942a646d47f9a21 +size 366753 diff --git a/docs/_static/images/omz_banner.png b/docs/_static/images/omz_banner.png new file mode 100644 index 00000000000..32a9e7f899e --- /dev/null +++ b/docs/_static/images/omz_banner.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a3d820b43de20a74d857cb720783c1b579624afde26a2bb4bb097ba8fd0bd79 +size 2052 diff --git a/docs/_static/images/omz_import.png b/docs/_static/images/omz_import.png new file mode 100644 index 00000000000..b66de979251 --- /dev/null +++ b/docs/_static/images/omz_import.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:349e96028d651807b887cf73a4be79eb2123e62c6abe2ed3ec4b419e306ce3d5 +size 56444 diff --git a/docs/_static/images/open_creat_project.png b/docs/_static/images/open_creat_project.png new file mode 100644 index 00000000000..42c9301a4b2 --- /dev/null +++ b/docs/_static/images/open_creat_project.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d9921a0e3d41ceb6236a362cb4e2796b9433c8073fafa2a86574317b131036d +size 120364 diff --git 
a/docs/_static/images/open_yolo_model.png b/docs/_static/images/open_yolo_model.png new file mode 100644 index 00000000000..772cda23ced --- /dev/null +++ b/docs/_static/images/open_yolo_model.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db9ba92400f3c40cb6ecc7db9b1e1c5168de158a98a2c1efd31ffe2562e90262 +size 14297 diff --git a/docs/_static/images/optimization_INT8_YOLO.png b/docs/_static/images/optimization_INT8_YOLO.png new file mode 100644 index 00000000000..aad6d31199d --- /dev/null +++ b/docs/_static/images/optimization_INT8_YOLO.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:495c0918c8424aea8dd0bd94a3a5dc2c91335dbab9196fece425689631d0fdb8 +size 57178 diff --git a/docs/_static/images/optimization_settings.png b/docs/_static/images/optimization_settings.png new file mode 100644 index 00000000000..91a3314121f --- /dev/null +++ b/docs/_static/images/optimization_settings.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9c013e8d76cf87c461407132ef6eb59e57c14f8c4cf242f71ae617140bf4209 +size 120403 diff --git a/docs/_static/images/optimize_face_detection.png b/docs/_static/images/optimize_face_detection.png new file mode 100644 index 00000000000..d984d5f1e53 --- /dev/null +++ b/docs/_static/images/optimize_face_detection.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:499c44eb2099f8f0de23800b6dd75395c110783eb80f92f1bb91034c7f93724d +size 132642 diff --git a/docs/_static/images/original_model_banner.png b/docs/_static/images/original_model_banner.png new file mode 100644 index 00000000000..76d5e0dd808 --- /dev/null +++ b/docs/_static/images/original_model_banner.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04e954b5e1501f958ea2c03303760786ca7a57aaf6de335cb936750c675e6107 +size 1626 diff --git a/docs/_static/images/ov_chart.png b/docs/_static/images/ov_chart.png index f91923443c3..fa25daf3601 100644 --- 
a/docs/_static/images/ov_chart.png +++ b/docs/_static/images/ov_chart.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:f9a0d138f7f6d2546f0e48d9240d6a90aec18dc6d8092e1082b2fc3125f1ce3d -size 108434 +oid sha256:83f0013e02ea792b553b5bd0a5630fb456a6fefc8dd701cd4430fc83d75cbff7 +size 78205 diff --git a/docs/_static/images/pack.png b/docs/_static/images/pack.png new file mode 100644 index 00000000000..6e9c713f394 --- /dev/null +++ b/docs/_static/images/pack.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91b43730b70b6d83bffb6cfbe9982cf806b0ac7f8a58001ba74596cface2fe5d +size 92680 diff --git a/docs/_static/images/performance.png b/docs/_static/images/performance.png new file mode 100644 index 00000000000..e874daf3eb5 --- /dev/null +++ b/docs/_static/images/performance.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b64c598656309f5156e291d93a2865537ebe2b87f62688b2d1fd2da35c63a471 +size 110383 diff --git a/docs/_static/images/performance_change.jpeg b/docs/_static/images/performance_change.jpeg new file mode 100644 index 00000000000..71ba4eab00c Binary files /dev/null and b/docs/_static/images/performance_change.jpeg differ diff --git a/docs/_static/images/performance_nlp.png b/docs/_static/images/performance_nlp.png new file mode 100644 index 00000000000..32f4a30cb4f --- /dev/null +++ b/docs/_static/images/performance_nlp.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a1989ee52ba90db620843fbdf8df387ec7e1794f4b2baca31cc9dfbd14026bc +size 84528 diff --git a/docs/_static/images/random_erase.png b/docs/_static/images/random_erase.png new file mode 100644 index 00000000000..b72482898cb --- /dev/null +++ b/docs/_static/images/random_erase.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8dfe286f3aa8eb104b0217fba8148fc1f69e86b4a1557ff36bcbead6dba60e5 +size 298320 diff --git a/docs/_static/images/required_params.png 
b/docs/_static/images/required_params.png new file mode 100644 index 00000000000..30e8261090a --- /dev/null +++ b/docs/_static/images/required_params.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b5bdd79f3460d532fe836389072f7ff7439d7e1104c56d03a617492226754ee +size 17428 diff --git a/docs/_static/images/rgb.png b/docs/_static/images/rgb.png new file mode 100644 index 00000000000..87f8a672d83 --- /dev/null +++ b/docs/_static/images/rgb.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f3dc6397527d62052243539fbcab4dc393a0c434fdddb34ef9f36031abbdf49 +size 22234 diff --git a/docs/_static/images/scales.png b/docs/_static/images/scales.png new file mode 100644 index 00000000000..e2dce47459c --- /dev/null +++ b/docs/_static/images/scales.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ddb124e0d67f958042cd04bfcaeb59c5505558651d86dee1d68a41b4f87a4d5e +size 5502 diff --git a/docs/_static/images/select_environment.png b/docs/_static/images/select_environment.png new file mode 100644 index 00000000000..37adb293071 --- /dev/null +++ b/docs/_static/images/select_environment.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:10673436cf21b8560887506204d4736dcce2a34574b4702caaff9992a5af0b01 +size 87902 diff --git a/docs/_static/images/select_tokenizer.png b/docs/_static/images/select_tokenizer.png new file mode 100644 index 00000000000..8f0f48eb9ce --- /dev/null +++ b/docs/_static/images/select_tokenizer.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:080563fa8984d82c4d0151e5c42c44c4cf872dffbc0eb5f42fff0ab58adee7f0 +size 18584 diff --git a/docs/_static/images/selected_components.png b/docs/_static/images/selected_components.png new file mode 100644 index 00000000000..05467f57a44 --- /dev/null +++ b/docs/_static/images/selected_components.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:951de8950a33b2fc56e9429cea6ecfb152dfc28d8088eb90db79a47c6d87ddc0 +size 41345 diff --git a/docs/_static/images/start_page_dl_wb.png b/docs/_static/images/start_page_dl_wb.png new file mode 100644 index 00000000000..6fb7c80641c --- /dev/null +++ b/docs/_static/images/start_page_dl_wb.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c57c68582ee21c31115c78515c2fd9189fe4010c223f356e734c004f160ad569 +size 129931 diff --git a/docs/_static/images/text_dataset.png b/docs/_static/images/text_dataset.png new file mode 100644 index 00000000000..c9ccb90a2c2 --- /dev/null +++ b/docs/_static/images/text_dataset.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58fdd181204bf35bca5a5db4b4ca0a673c5eb009cb7c40e006713f671ccfc86f +size 84039 diff --git a/docs/_static/images/tokenizer_fill.png b/docs/_static/images/tokenizer_fill.png new file mode 100644 index 00000000000..c0fe2487ac2 --- /dev/null +++ b/docs/_static/images/tokenizer_fill.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93f4045a51868adb17b7ed949f27f430836fdc29fca870f6bc07564bc4b800fd +size 17119 diff --git a/docs/_static/images/vertical_flip.png b/docs/_static/images/vertical_flip.png new file mode 100644 index 00000000000..460a01b0b7d --- /dev/null +++ b/docs/_static/images/vertical_flip.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:13cfb4f7a604f3c9693ccd59500e509b5adb5f4e81171feaa38cc6e16b773765 +size 305754 diff --git a/docs/_static/images/yml_file_info.png b/docs/_static/images/yml_file_info.png new file mode 100644 index 00000000000..b16ba125863 --- /dev/null +++ b/docs/_static/images/yml_file_info.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c68c191934f4e0b571b02c3c458036df1361a12cf0c87c89f023ff3b383fb393 +size 11448 diff --git a/docs/_static/images/yolo_detection_false.png b/docs/_static/images/yolo_detection_false.png new file mode 100644 index 
00000000000..0664ae17788 --- /dev/null +++ b/docs/_static/images/yolo_detection_false.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6786910b4bae6ec610b4b8ca3c612d296dbe3c4b7f758f3a31b959ffbcc3173c +size 239896 diff --git a/docs/_static/images/yolov4_imported.png b/docs/_static/images/yolov4_imported.png new file mode 100644 index 00000000000..6f9e8db2419 --- /dev/null +++ b/docs/_static/images/yolov4_imported.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:01b6dd951d8077c3a2420479c4cae59f5e8a58bac6272afc2c9259901f21f1d9 +size 53491 diff --git a/docs/_static/js/custom.js b/docs/_static/js/custom.js index 8b027164517..23763b3f0ca 100644 --- a/docs/_static/js/custom.js +++ b/docs/_static/js/custom.js @@ -25,8 +25,17 @@ var wapSection = 'openvinotoolkit'; s.appendChild(po); })(); +// legal notice for benchmarks +function addLegalNotice() { + if (window.location.href.indexOf('openvino_docs_performance_') !== -1) { + var legalNotice = $('

Results may vary. For workloads and configurations visit: www.intel.com/PerformanceIndex and Legal Information.

'); + $('body').append(legalNotice); + } +} + $(document).ready(function () { createVersions(); + updateTitleTag(); init_col_sections(); init_switchers(); handleSwitcherParam(); @@ -35,6 +44,7 @@ $(document).ready(function () { if (TABLE_SORT) { addTableSort(); } + addLegalNotice(); }); // Determine where we'd go if clicking on a version selector option @@ -44,6 +54,13 @@ function getPageUrlWithVersion(version) { return encodeURI(newURL); } +function updateTitleTag() { + var title = $('title'); + var currentVersion = getCurrentVersion(); + var newTitle = (title.text() + ' — Version(' + currentVersion + ')').replace(/\s+/g, ' ').trim(); + title.text(newTitle); +} + function getCurrentVersion() { var protocol = window.location.protocol + "//"; var index = window.location.href.indexOf(protocol); diff --git a/docs/_static/js/graphs_ov_tf.js b/docs/_static/js/graphs_ov_tf.js new file mode 100644 index 00000000000..bf16e9dacc5 --- /dev/null +++ b/docs/_static/js/graphs_ov_tf.js @@ -0,0 +1,109 @@ +$(document).ready(function () { + var chartBlock = $('.chart-block-tf-ov'); + chartBlock.each(function () { + var url = $(this).data('loadcsv'); + Papa.parse(url, { + download: true, + complete: renderData($(this)) + }) + }); + + function getLabels(data) { + return data + .map((item) => item[1]); + } + + function getChartOptions(title, displayLabels) { + return { + responsive: false, + maintainAspectRatio: false, + legend: { display: true, position: 'bottom' }, + title: { + display: true, + text: title + }, + scales: { + xAxes: [{ + ticks: { + beginAtZero: true + } + }], + yAxes: [{ + ticks: { + display: displayLabels, //this will remove only the label + beginAtZero: true + } + }] + }, + plugins: { + datalabels: { + color: "#4A4A4A", + anchor: "end", + align: "end", + clamp: false, + offset: 0, + display: true, + font: { + size: 8, + family: 'Roboto' + } + } + } + } + } + + function getChartData(data) { + function getDataset(data, col, label, color) { + return { + label: label, 
+ data: data.map(function (item) { + return item[col] + }), + backgroundColor: color, + borderColor: 'rgba(170,170,170,0)', + barThickness: 12 + } + } + return { + labels: getLabels(data), + datasets: [getDataset(data, 2, 'openvino', '#00C7FD'), getDataset(data, 3, 'TF', '#8F5DA2')] + }; + } + + function renderData(currentChart) { + return function (result) { + var data = result.data; + // remove col names + data.shift(0); + var chartName = data[1][0]; + var chartSlug = chartName.replace(')', '').replace(' (', '-'); + var graphContainer = $('
'); + var chartContainer = $('
'); + graphContainer.attr('id', 'ov-graph-container-' + chartSlug); + chartContainer.addClass('chart-container'); + chartContainer.addClass('container'); + var chartWrap = $('
'); + chartWrap.addClass('chart-wrap'); + chartWrap.addClass('container'); + chartContainer.append(chartWrap); + var chart = $('
'); + chart.addClass('chart'); + chart.addClass('col-md-12'); + var canvas = $(''); + chart.append(canvas); + var container = $('
'); + container.addClass('row'); + container.append(chart); + var context = canvas.get(0).getContext('2d'); + context.canvas.width = context.canvas.width * 2.5; + var chartTitle = chartName + ', Throughput (FPS) Precision: FP32 (Higher is better)'; + new Chart(context, { + type: 'horizontalBar', + data: getChartData(data), + options: getChartOptions(chartTitle, true) + }); + chartContainer.append(container); + currentChart.append(chartContainer); + } + } +}); diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index f12ea6528e0..de7a83e8c19 100644 --- a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -12,7 +12,5 @@ - - {% endblock %} diff --git a/docs/api/ie_python_api/api.rst b/docs/api/ie_python_api/api.rst index 3528148dd87..a2f69af4790 100644 --- a/docs/api/ie_python_api/api.rst +++ b/docs/api/ie_python_api/api.rst @@ -67,12 +67,6 @@ OpenVINO Python API openvino.runtime.passes -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - - openvino.runtime.utils - .. autosummary:: :toctree: _autosummary :template: custom-module-template.rst @@ -85,12 +79,6 @@ OpenVINO Python API openvino.frontend -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - - openvino.offline_transformations - .. toctree:: :maxdepth: 2 :hidden: diff --git a/docs/benchmarks/performance_benchmarks_faq.md b/docs/benchmarks/performance_benchmarks_faq.md index 2010111bf3b..b628b12f116 100644 --- a/docs/benchmarks/performance_benchmarks_faq.md +++ b/docs/benchmarks/performance_benchmarks_faq.md @@ -66,6 +66,6 @@ The CAFFE version of resnet-50, mobilenet-v1-1.0-224 and mobilenet-v2 have been The web site format has changed in order to support the more common search approach of looking for the performance of a given neural network model on different HW-platforms. As opposed to review a given HW-platform's performance on different neural network models. #### 13. How is Latency measured? 
-Latency is measured by running the OpenVINO™ inference engine in synchronous mode. In synchronous mode each frame or image is processed through the entire set of stages (pre-processing, inference, post-processing) before the next frame or image is processed. This KPI is relevant for applications where the inference on a single image is required, for example the analysis of an ultra sound image in a medical application or the analysis of a seismic image in the oil & gas industry. Other use cases include real-time or near real-time applications like an industrial robot's response to changes in its environment and obstacle avoidance for autonomous vehicles where a quick response to the result of the inference is required. +Latency is measured by running the OpenVINO™ Runtime in synchronous mode. In synchronous mode each frame or image is processed through the entire set of stages (pre-processing, inference, post-processing) before the next frame or image is processed. This KPI is relevant for applications where the inference on a single image is required, for example the analysis of an ultra sound image in a medical application or the analysis of a seismic image in the oil & gas industry. Other use cases include real-time or near real-time applications like an industrial robot's response to changes in its environment and obstacle avoidance for autonomous vehicles where a quick response to the result of the inference is required. For more complete information about performance and benchmark results, visit: [www.intel.com/benchmarks](https://www.intel.com/benchmarks) and [Optimization Notice](https://software.intel.com/articles/optimization-notice). [Legal Information](../Legal_Information.md). 
diff --git a/docs/benchmarks/performance_benchmarks_openvino.md b/docs/benchmarks/performance_benchmarks_openvino.md index ca55baf8c2c..89b282ebf6a 100644 --- a/docs/benchmarks/performance_benchmarks_openvino.md +++ b/docs/benchmarks/performance_benchmarks_openvino.md @@ -14,7 +14,7 @@ This benchmark setup includes a single machine on which both the benchmark application and the OpenVINO™ installation reside. -The benchmark application loads the Inference Engine (SW) at run time and executes inferences on the specified hardware inference engine, (CPU, GPU or VPU). The benchmark application measures the time spent on actual inferencing (excluding any pre or post processing) and then reports on the inferences per second (or Frames Per Second). For more information on the benchmark application, please also refer to the entry 5 of the [FAQ section](performance_benchmarks_faq.md). +The benchmark application loads the OpenVINO Runtime (SW) at runtime and executes inferences on the specified hardware (CPU, GPU or VPU). The benchmark application measures the time spent on actual inferencing (excluding any pre or post processing) and then reports on the inferences per second (or Frames Per Second). For more information on the benchmark application, please also refer to the entry 5 of the [FAQ section](performance_benchmarks_faq.md). Devices similar to the ones we have used for benchmarking can be accessed using [Intel® DevCloud for the Edge](https://devcloud.intel.com/edge/), a remote development environment with access to Intel® hardware and the latest versions of the Intel® Distribution of the OpenVINO™ Toolkit. [Learn more](https://devcloud.intel.com/edge/get_started/devcloud/) or [Register here](https://inteliot.force.com/DevcloudForEdge/s/). 
diff --git a/docs/benchmarks/performance_benchmarks_ovms.md b/docs/benchmarks/performance_benchmarks_ovms.md index 57495835d86..d7393aa5047 100644 --- a/docs/benchmarks/performance_benchmarks_ovms.md +++ b/docs/benchmarks/performance_benchmarks_ovms.md @@ -1,6 +1,6 @@ # OpenVINO™ Model Server Benchmark Results {#openvino_docs_performance_benchmarks_ovms} -OpenVINO™ Model Server is an open-source, production-grade inference platform that exposes a set of models via a convenient inference API over gRPC or HTTP/REST. It employs the inference engine libraries for from the Intel® Distribution of OpenVINO™ toolkit to extend workloads across Intel® hardware including CPU, GPU and others. +OpenVINO™ Model Server is an open-source, production-grade inference platform that exposes a set of models via a convenient inference API over gRPC or HTTP/REST. It employs the OpenVINO Runtime libraries from the Intel® Distribution of OpenVINO™ toolkit to extend workloads across Intel® hardware including CPU, GPU and others. ![OpenVINO™ Model Server](../img/performance_benchmarks_ovms_01.png) diff --git a/docs/conf.py b/docs/conf.py index 692c2302fda..be7051bcf34 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -63,7 +63,8 @@ templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'openvino/inference-engine'] +exclude_patterns = ['_build', 'Thumbs.db', + '.DS_Store', 'openvino/inference-engine'] panels_add_bootstrap_css = False @@ -103,11 +104,23 @@ repositories = { 'github_version': 'master', 'host_url': 'https://github.com' }, + 'ote': { + 'github_user': 'openvinotoolkit', + 'github_repo': 'training_extensions', + 'github_version': 'develop', + 'host_url': 'https://github.com' + }, 'open_model_zoo': { 'github_user': 'openvinotoolkit', 'github_repo': 'open_model_zoo', 'github_version': 'master', 'host_url': 'https://github.com' + }, + 'ovms': { + 'github_user': 'openvinotoolkit', + 'github_repo': 'model_server', + 'github_version': 'main', + 'host_url': 'https://github.com' } } @@ -128,6 +141,7 @@ html_static_path = ['_static'] # monkeypatch sphinx api doc to prevent showing inheritance from object and enum.Enum add_line = ClassDocumenter.add_line + def add_line_no_base_object(self, line, *args, **kwargs): if line.strip() in ['Bases: :class:`object`', 'Bases: :class:`enum.Enum`']: return @@ -137,14 +151,30 @@ def add_line_no_base_object(self, line, *args, **kwargs): ClassDocumenter.add_line = add_line_no_base_object +# OpenVINO Python API Reference Configuration +exclude_pyapi_methods = ('__weakref__', + '__doc__', + '__module__', + '__dict__', + 'add_openvino_libs_to_path' + ) + + +def autodoc_skip_member(app, what, name, obj, skip, options): + return name in exclude_pyapi_methods + def setup(app): logger = logging.getLogger(__name__) - app.add_config_value('doxygen_mapping_file', doxygen_mapping_file, rebuild=True) + app.add_config_value('doxygen_mapping_file', + doxygen_mapping_file, rebuild=True) app.add_config_value('repositories', repositories, rebuild=True) + app.connect('autodoc-skip-member', autodoc_skip_member) app.add_js_file('js/custom.js') app.add_js_file('js/graphs.js') + app.add_js_file('js/graphs_ov_tf.js') try: - shutil.copytree(os.path.join(app.srcdir, 'csv'), 
os.path.join(app.outdir, 'csv'), dirs_exist_ok=True) + shutil.copytree(os.path.join(app.srcdir, 'csv'), os.path.join( + app.outdir, 'csv'), dirs_exist_ok=True) except FileNotFoundError: logger.warning('csv directory not found.') diff --git a/docs/documentation.md b/docs/documentation.md index 9a89656a6dd..e4a18481222 100644 --- a/docs/documentation.md +++ b/docs/documentation.md @@ -8,7 +8,6 @@ :hidden: openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide - openvino_docs_HOWTO_Custom_Layers_Guide omz_tools_downloader @@ -27,11 +26,12 @@ :caption: Tuning for Performance :hidden: - openvino_docs_performance_benchmarks openvino_docs_optimization_guide_dldt_optimization_guide openvino_docs_MO_DG_Getting_Performance_Numbers - pot_README + openvino_docs_model_optimization_guide + openvino_docs_deployment_optimization_guide_dldt_optimization_guide openvino_docs_tuning_utilities + openvino_docs_performance_benchmarks .. toctree:: @@ -53,8 +53,6 @@ Intel® Deep Learning Streamer openvino_docs_gapi_gapi_intro - OpenVX Developer Guide - OpenVX API Reference OpenCV* Developer Guide OpenCL™ Developer Guide @@ -63,14 +61,17 @@ :caption: Add-Ons :hidden: - openvino_docs_ovms + ovms_what_is_openvino_model_server + ote_documentation ovsa_get_started .. toctree:: :maxdepth: 1 - :caption: Developing Inference Engine Plugins + :caption: OpenVINO Extensibility :hidden: + openvino_docs_Extensibility_UG_Intro + openvino_docs_transformations Inference Engine Plugin Developer Guide groupie_dev_api Plugin Transformation Pipeline @@ -93,7 +94,7 @@ This section provides reference documents that guide you through developing your With the [Model Downloader](@ref omz_tools_downloader) and [Model Optimizer](MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) guides, you will learn to download pre-trained models and convert them for use with the OpenVINO™ toolkit. 
You can provide your own model or choose a public or Intel model from a broad selection provided in the [Open Model Zoo](model_zoo.md). ## Deploying Inference -The [OpenVINO™ Runtime User Guide](OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) explains the process of creating your own application that runs inference with the OpenVINO™ toolkit. The [API Reference](./api_references.html) defines the Inference Engine API for Python, C++, and C and the nGraph API for Python and C++. The Inference Engine API is what you'll use to create an OpenVINO™ application, while the nGraph API is available for using enhanced operations sets and other features. After writing your application, you can use the [Deployment Manager](install_guides/deployment-manager-tool.md) for deploying to target devices. +The [OpenVINO™ Runtime User Guide](OV_Runtime_UG/openvino_intro.md) explains the process of creating your own application that runs inference with the OpenVINO™ toolkit. The [API Reference](./api_references.html) defines the OpenVINO Runtime API for Python, C++, and C. The OpenVINO Runtime API is what you'll use to create an OpenVINO™ inference application, use enhanced operations sets and other features. After writing your application, you can use the [Deployment Manager](install_guides/deployment-manager-tool.md) for deploying to target devices. ## Tuning for Performance The toolkit provides a [Performance Optimization Guide](optimization_guide/dldt_optimization_guide.md) and utilities for squeezing the best performance out of your application, including [Accuracy Checker](@ref omz_tools_accuracy_checker), [Post-Training Optimization Tool](@ref pot_README), and other tools for measuring accuracy, benchmarking performance, and tuning your application. 
@@ -108,5 +109,5 @@ The OpenVINO™ toolkit also works with the following media processing framework * [Intel® Deep Learning Streamer (Intel® DL Streamer)](@ref openvino_docs_dlstreamer) — A streaming media analytics framework based on GStreamer, for creating complex media analytics pipelines optimized for Intel hardware platforms. Go to the Intel® DL Streamer [documentation](https://dlstreamer.github.io/) website to learn more. * [Intel® oneAPI Video Processing Library (oneVPL)](https://www.intel.com/content/www/us/en/develop/documentation/oneapi-programming-guide/top/api-based-programming/intel-oneapi-video-processing-library-onevpl.html) — A programming interface for video decoding, encoding, and processing to build portable media pipelines on CPUs, GPUs, and other accelerators. -You can also add computer vision capabilities to your application using optimized versions of [OpenCV](https://opencv.org/) and [OpenVX](https://khronos.org/openvx). +You can also add computer vision capabilities to your application using optimized versions of [OpenCV](https://opencv.org/). 
diff --git a/docs/doxygen-xfail.txt b/docs/doxygen-xfail.txt index 80435be0e28..ad0eb3d7784 100644 --- a/docs/doxygen-xfail.txt +++ b/docs/doxygen-xfail.txt @@ -60,4 +60,13 @@ openvino_docs_ie_dg_lpt_variadicsplittransformation.rst openvino_docs_ie_plugin_dg_lp_representation.rst openvino_docs_ie_dg_lpt.rst notebooks/notebook_utils-with-output.rst +ovms_extras_nginx-mtls-auth-readme.rst +ovms_client_python_lib_readme.rst +ovms_docs_shape_batch_layout.rst api/api_reference.rst +workbench/docs/workbench_dg/key_concepts.md +workbench/docs/workbench_dg/run_single_inference.md +omz_tools_downloader.rst +warning +attribute +omz_python_model_api.rst \ No newline at end of file diff --git a/docs/doxyrest/frame/common/item.lua b/docs/doxyrest/frame/common/item.lua index 18595ff9d78..8f479c37b1c 100644 --- a/docs/doxyrest/frame/common/item.lua +++ b/docs/doxyrest/frame/common/item.lua @@ -98,16 +98,16 @@ function getItemRefTargetString(item) end local s = - ".. index:: pair: " .. item.memberKind .. "; " .. item.name .. "\n" .. - ".. _doxid-" .. item.id .. ":\n" + ".. _doxid-" .. item.id .. ":\n" .. + ".. index:: pair: " .. item.memberKind .. "; " .. item.name .. "\n" if item.isSubGroupHead then for j = 1, #item.subGroupSlaveArray do slaveItem = item.subGroupSlaveArray[j] s = s .. - ".. index:: pair: " .. slaveItem.memberKind .. "; " .. slaveItem.name .. "\n" .. - ".. _doxid-" .. slaveItem.id .. ":\n" + ".. _doxid-" .. slaveItem.id .. ":\n" .. + ".. index:: pair: " .. slaveItem.memberKind .. "; " .. slaveItem.name .. "\n" end end diff --git a/docs/gapi/face_beautification.md b/docs/gapi/face_beautification.md index 25619ae8e0b..7026d9b98a0 100644 --- a/docs/gapi/face_beautification.md +++ b/docs/gapi/face_beautification.md @@ -338,7 +338,7 @@ The steps to get the masks are: Once the graph is fully expressed, we can finally compile it and run on real data. G-API graph compilation is the stage where the G-API framework actually understands which kernels and networks to use. 
This configuration happens via G-API compilation arguments. ### DNN Parameters -This sample is using OpenVINO™ Toolkit Inference Engine backend for DL inference, which is configured the following way: +This sample is using OpenVINO™ Toolkit OpenVINO Runtime backend for DL inference, which is configured the following way: ```cpp auto faceParams = cv::gapi::ie::Params { diff --git a/docs/gapi/gapi_face_analytics_pipeline.md b/docs/gapi/gapi_face_analytics_pipeline.md index 6b544485668..be07aaae573 100644 --- a/docs/gapi/gapi_face_analytics_pipeline.md +++ b/docs/gapi/gapi_face_analytics_pipeline.md @@ -41,7 +41,7 @@ Constructing a G-API graph for a video streaming case does not differ much from ### Declare Deep Learning topologies {#gapi_ifd_declaring_nets} -In contrast with traditional CV functions (see [core](https://docs.opencv.org/4.5.0/df/d1f/group__gapi__core.html) and [imgproc](https://docs.opencv.org/4.5.0/d2/d00/group__gapi__imgproc.html)) where G-API declares distinct operations for every function, inference in G-API is a single generic operation `cv::gapi::infer<>`. As usual, it is just an interface and it can be implemented in a number of ways under the hood. In OpenCV 4.2, only OpenVINO™ Inference Engine-based backend is available, and OpenCV's own DNN module-based backend is to come. +In contrast with traditional CV functions (see [core](https://docs.opencv.org/4.5.0/df/d1f/group__gapi__core.html) and [imgproc](https://docs.opencv.org/4.5.0/d2/d00/group__gapi__imgproc.html)) where G-API declares distinct operations for every function, inference in G-API is a single generic operation `cv::gapi::infer<>`. As usual, it is just an interface and it can be implemented in a number of ways under the hood. In OpenCV 4.2, only OpenVINO™ Runtime-based backend is available, and OpenCV's own DNN module-based backend is to come. `cv::gapi::infer<>` is _parametrized_ by the details of a topology we are going to execute. 
Like operations, topologies in G-API are strongly typed and are defined with a special macro `G_API_NET()`: @@ -126,7 +126,7 @@ G-API strictly separates construction from configuration -- with the idea to kee Platform-specific details arise when the pipeline is *compiled* -- i.e. is turned from a declarative to an executable form. The way *how* to run stuff is specified via compilation arguments, and new inference/streaming features are no exception from this rule. -G-API is built on backends which implement interfaces (see [Architecture](https://docs.opencv.org/4.5.0/de/d4d/gapi_hld.html) and [Kernels](kernel_api.md) for details) -- thus `cv::gapi::infer<>` is a function which can be implemented by different backends. In OpenCV 4.2, only OpenVINO™ Inference Engine backend for inference is available. Every inference backend in G-API has to provide a special parameterizable structure to express *backend-specific* neural network parameters -- and in this case, it is `cv::gapi::ie::Params`: +G-API is built on backends which implement interfaces (see [Architecture](https://docs.opencv.org/4.5.0/de/d4d/gapi_hld.html) and [Kernels](kernel_api.md) for details) -- thus `cv::gapi::infer<>` is a function which can be implemented by different backends. In OpenCV 4.2, only OpenVINO™ Runtime backend for inference is available. Every inference backend in G-API has to provide a special parameterizable structure to express *backend-specific* neural network parameters -- and in this case, it is `cv::gapi::ie::Params`: ```cpp auto det_net = cv::gapi::ie::Params { @@ -148,7 +148,7 @@ auto emo_net = cv::gapi::ie::Params { Here we define three parameter objects: `det_net`, `age_net`, and `emo_net`. Every object is a `cv::gapi::ie::Params` structure parametrization for each particular network we use. On a compilation stage, G-API automatically matches network parameters with their `cv::gapi::infer<>` calls in graph using this information. 
-Regardless of the topology, every parameter structure is constructed with three string arguments – specific to the OpenVINO™ Inference Engine: +Regardless of the topology, every parameter structure is constructed with three string arguments – specific to the OpenVINO™ Runtime: * Path to the topology's intermediate representation (.xml file); * Path to the topology's model weights (.bin file); diff --git a/docs/get_started.md b/docs/get_started.md index f576ab82b02..aa86fd8e9e8 100644 --- a/docs/get_started.md +++ b/docs/get_started.md @@ -22,13 +22,7 @@ Configurations for GPU Configurations for NCS2 Configurations for VPU - -.. toctree:: - :maxdepth: 1 - :hidden: - :caption: Troubleshooting - - Troubleshooting Guide + Configurations for GNA .. toctree:: :maxdepth: 1 @@ -41,11 +35,17 @@ .. toctree:: :maxdepth: 1 :hidden: - :caption: Inference Engine Code Samples + :caption: OpenVINO Code Samples openvino_docs_IE_DG_Samples_Overview - +.. toctree:: + :maxdepth: 1 + :hidden: + :caption: Troubleshooting + + Installation & Configuration Issues + @endsphinxdirective @sphinxdirective @@ -62,7 +62,7 @@

-

If you are using Intel® Processor Graphics, Intel® Vision Accelerator Design with Intel® Movidius™ VPUs or Intel® Neural Compute Stick 2, please check the additional configurations for them accordingly: Configurations for GPU, Configurations for VPU or Configurations for NCS2. +

If you are using Intel® Processor Graphics, Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, Intel® Neural Compute Stick 2 or Intel® Gaussian & Neural Accelerator (GNA), please check the additional configurations for them accordingly: Configurations for GPU, Configurations for VPU, Configurations for NCS2 or Configurations for GNA.

With OpenVINO installed, you are ready to run your first inference and learn the workflow.
Here is a set of hands-on demonstrations of various complexity levels to guide you through the process: from performing sample inference with just one command, to running code samples, demo application or Jupyter notebooks. If you prefer working with GUI, you can also get started with the DL Workbench application. This way you can choose the right level for you.

@@ -70,10 +70,6 @@

Choose how you want to progress:

- -

One-command demo

-

Execute just one command and watch all the steps happening before your eyes.

-

Step-by-step demo

Follow the step-by-step instructions to execute simple tasks with OpenVINO.

@@ -87,7 +83,7 @@

Use a web-based version of OpenVINO with a Graphical User Interface. Installing a DL Workbench container is required.

-

Inference Engine samples

+

OpenVINO samples

See ready-made applications explaining OpenVINO features and various use-cases.

diff --git a/docs/get_started/get_started_demos.md b/docs/get_started/get_started_demos.md index 25eb5786d5e..9a6a34fa256 100644 --- a/docs/get_started/get_started_demos.md +++ b/docs/get_started/get_started_demos.md @@ -10,9 +10,8 @@ You will perform the following steps: 3. Download media files to run inference on. 4. Run inference on the sample and see the results: - Image Classification Code Sample - - Security Barrier Camera Demo application -If you installed OpenVINO™ via `pip` you need to change commands listed below. Details are listed in one of [tutorials](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/002-openvino-api/002-openvino-api.ipynb). +If you installed OpenVINO™ via `pip` you can quickly getting started with the product by using these [tutorials](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks). This guide assumes you completed all installation and configuration steps. If you have not yet installed and configured the toolkit: @@ -31,23 +30,32 @@ This guide assumes you completed all installation and configuration steps. If yo @endsphinxdirective +## Install OpenVINO Development Tools + +To install OpenVINO Development Tools for working with Caffe* models, use the following command: + +``` sh + pip install openvino-dev[caffe] +``` + ## Build Samples and Demos If you have already built the demos and samples, you can skip this section. The build will take about 5-10 minutes, depending on your system. To build OpenVINO samples: + @sphinxdirective .. tab:: Linux - Go to the :doc:`Inference Engine Samples page ` and see the "Build the Sample Applications on Linux*" section. + Go to the :doc:`OpenVINO Samples page ` and see the "Build the Sample Applications on Linux*" section. .. tab:: Windows - Go to the :doc:`Inference Engine Samples page ` and see the "Build the Sample Applications on Microsoft Windows* OS" section. 
+ Go to the :doc:`OpenVINO Samples page ` and see the "Build the Sample Applications on Microsoft Windows* OS" section. .. tab:: macOS - Go to the :doc:`Inference Engine Samples page ` and see the "Build the Sample Applications on macOS*" section. + Go to the :doc:`OpenVINO Samples page ` and see the "Build the Sample Applications on macOS*" section. @endsphinxdirective @@ -83,102 +91,65 @@ Options to find a model suitable for the OpenVINO™ toolkit: This guide uses the OpenVINO™ Model Downloader to get pre-trained models. You can use one of the following commands to find a model: -### List the models available in the downloader - -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - cd /opt/intel/openvino_2021/deployment_tools/tools/model_downloader/ - python3 info_dumper.py --print_all - -.. tab:: Windows - - .. code-block:: bat - - cd \deployment_tools\tools\model_downloader\ - python info_dumper.py --print_all - -.. tab:: macOS - - .. code-block:: sh - - cd /opt/intel/openvino_2021/deployment_tools/tools/model_downloader/ - python3 info_dumper.py --print_all - -@endsphinxdirective - -### Use `grep` to list models that have a specific name pattern +* List the models available in the downloader ``` sh - python3 info_dumper.py --print_all | grep + info_dumper --print_all ``` -Use the Model Downloader to download the models to a models directory. This guide uses `` and `` as placeholders for the models directory and model name: -@sphinxdirective -.. tab:: Linux +* Use `grep` to list models that have a specific name pattern - Don't run downloader with `sudo`. It will further lead to complications - .. code-block:: sh +``` sh + info_dumper --print_all | grep +``` - python3 downloader.py --name --output_dir +* Use Model Downloader to download models. -.. tab:: Windows + This guide uses `` and `` as placeholders for the models directory and model name: - .. 
code-block:: bat +``` sh + omz_downloader --name --output_dir +``` - python downloader.py --name --output_dir - -.. tab:: macOS - - Don't run downloader with `sudo`. It will further lead to complications - .. code-block:: sh - - python3 downloader.py --name --output_dir - -@endsphinxdirective - -Download the following models to run the Image Classification Sample and Security Barrier Camera Demo applications: +* Download the following models to run the Image Classification Sample: |Model Name | Code Sample or Demo App | |-----------------------------------------------|------------------------------------------| -|`squeezenet1.1` | Image Classification Sample | -|`vehicle-license-plate-detection-barrier-0106` | Security Barrier Camera Demo | -|`vehicle-attributes-recognition-barrier-0039` | Security Barrier Camera Demo | -|`license-plate-recognition-barrier-0001` | Security Barrier Camera Demo | +|`googlenet-v1` | Image Classification Sample | @sphinxdirective .. raw:: html -
+
@endsphinxdirective -To download the SqueezeNet 1.1 Caffe* model to the `models` folder: +To download the GoogleNet v1 Caffe* model to the `models` folder: @sphinxdirective + .. tab:: Linux .. code-block:: sh - python3 downloader.py --name squeezenet1.1 --output_dir ~/models + omz_downloader --name googlenet-v1 --output_dir ~/models .. tab:: Windows .. code-block:: bat - python downloader.py --name squeezenet1.1 --output_dir C:\Users\\Documents\models + omz_downloader --name googlenet-v1 --output_dir %USERPROFILE%\Documents\models .. tab:: macOS .. code-block:: sh - python3 downloader.py --name squeezenet1.1 --output_dir ~/models + omz_downloader --name googlenet-v1 --output_dir ~/models @endsphinxdirective Your screen looks similar to this after the download and shows the paths of downloaded files: + @sphinxdirective .. tab:: Linux @@ -186,14 +157,14 @@ Your screen looks similar to this after the download and shows the paths of down ###############|| Downloading models ||############### - ========= Downloading /home/username/models/public/squeezenet1.1/squeezenet1.1.prototxt + ========= Downloading /home/username/models/public/googlenet-v1/googlenet-v1.prototxt - ========= Downloading /home/username/models/public/squeezenet1.1/squeezenet1.1.caffemodel + ========= Downloading /home/username/models/public/googlenet-v1/googlenet-v1.caffemodel ... 100%, 4834 KB, 3157 KB/s, 1 seconds passed ###############|| Post processing ||############### - ========= Replacing text in /home/username/models/public/squeezenet1.1/squeezenet1.1.prototxt ========= + ========= Replacing text in /home/username/models/public/googlenet-v1/googlenet-v1.prototxt ========= .. 
tab:: Windows @@ -201,15 +172,15 @@ Your screen looks similar to this after the download and shows the paths of down ################|| Downloading models ||################ - ========== Downloading C:\Users\username\Documents\models\public\squeezenet1.1\squeezenet1.1.prototxt + ========== Downloading C:\Users\username\Documents\models\public\googlenet-v1\googlenet-v1.prototxt ... 100%, 9 KB, ? KB/s, 0 seconds passed - ========== Downloading C:\Users\username\Documents\models\public\squeezenet1.1\squeezenet1.1.caffemodel + ========== Downloading C:\Users\username\Documents\models\public\googlenet-v1\googlenet-v1.caffemodel ... 100%, 4834 KB, 571 KB/s, 8 seconds passed ################|| Post-processing ||################ - ========== Replacing text in C:\Users\username\Documents\models\public\squeezenet1.1\squeezenet1.1.prototxt + ========== Replacing text in C:\Users\username\Documents\models\public\googlenet-v1\googlenet-v1.prototxt .. tab:: macOS @@ -217,135 +188,15 @@ Your screen looks similar to this after the download and shows the paths of down ###############|| Downloading models ||############### - ========= Downloading /Users/username/models/public/squeezenet1.1/squeezenet1.1.prototxt + ========= Downloading /Users/username/models/public/googlenet-v1/googlenet-v1.prototxt ... 100%, 9 KB, 44058 KB/s, 0 seconds passed - ========= Downloading /Users/username/models/public/squeezenet1.1/squeezenet1.1.caffemodel + ========= Downloading /Users/username/models/public/googlenet-v1/googlenet-v1.caffemodel ... 100%, 4834 KB, 4877 KB/s, 0 seconds passed ###############|| Post processing ||############### - ========= Replacing text in /Users/username/models/public/squeezenet1.1/squeezenet1.1.prototxt ========= - -@endsphinxdirective - -@sphinxdirective -.. raw:: html - -
- -@endsphinxdirective - -@sphinxdirective -.. raw:: html - -
- -@endsphinxdirective - -To download all three pre-trained models in FP16 precision to the `models` folder in your home folder: - -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - python3 downloader.py --name vehicle-license-plate-detection-barrier-0106,vehicle-attributes-recognition-barrier-0039,license-plate-recognition-barrier-0001 --output_dir ~/models --precisions FP16 - -.. tab:: Windows - - .. code-block:: bat - - python downloader.py --name vehicle-license-plate-detection-barrier-0106,vehicle-attributes-recognition-barrier-0039,license-plate-recognition-barrier-0001 --output_dir C:\Users\\Documents\models --precisions FP16 - -.. tab:: macOS - - .. code-block:: sh - - python3 downloader.py --name vehicle-license-plate-detection-barrier-0106,vehicle-attributes-recognition-barrier-0039,license-plate-recognition-barrier-0001 --output_dir ~/models --precisions FP16 - -@endsphinxdirective - -Your screen looks similar to this after the download: - -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - ################|| Downloading models ||################ - - ========== Downloading /home/username/models/intel/vehicle-license-plate-detection-barrier-0106/FP16/vehicle-license-plate-detection-barrier-0106.xml - ... 100%, 204 KB, 183949 KB/s, 0 seconds passed - - ========== Downloading /home/username/models/intel/vehicle-license-plate-detection-barrier-0106/FP16/vehicle-license-plate-detection-barrier-0106.bin - ... 100%, 1256 KB, 3948 KB/s, 0 seconds passed - - ========== Downloading /home/username/models/intel/vehicle-attributes-recognition-barrier-0039/FP16/vehicle-attributes-recognition-barrier-0039.xml - ... 100%, 32 KB, 133398 KB/s, 0 seconds passed - - ========== Downloading /home/username/models/intel/vehicle-attributes-recognition-barrier-0039/FP16/vehicle-attributes-recognition-barrier-0039.bin - ... 
100%, 1222 KB, 3167 KB/s, 0 seconds passed - - ========== Downloading /home/username/models/intel/license-plate-recognition-barrier-0001/FP16/license-plate-recognition-barrier-0001.xml - ... 100%, 47 KB, 85357 KB/s, 0 seconds passed - - ========== Downloading /home/username/models/intel/license-plate-recognition-barrier-0001/FP16/license-plate-recognition-barrier-0001.bin - ... 100%, 2378 KB, 5333 KB/s, 0 seconds passed - - ################|| Post-processing ||################ - -.. tab:: Windows - - .. code-block:: bat - - ################|| Downloading models ||################ - - ========== Downloading C:\Users\username\Documents\models\intel\vehicle-license-plate-detection-barrier-0106\FP16\vehicle-license-plate-detection-barrier-0106.xml - ... 100%, 207 KB, 13810 KB/s, 0 seconds passed - - ========== Downloading C:\Users\username\Documents\models\intel\vehicle-license-plate-detection-barrier-0106\FP16\vehicle-license-plate-detection-barrier-0106.bin - ... 100%, 1256 KB, 70 KB/s, 17 seconds passed - - ========== Downloading C:\Users\username\Documents\models\intel\vehicle-attributes-recognition-barrier-0039\FP16\vehicle-attributes-recognition-barrier-0039.xml - ... 100%, 32 KB, ? KB/s, 0 seconds passed - - ========== Downloading C:\Users\username\Documents\models\intel\vehicle-attributes-recognition-barrier-0039\FP16\vehicle-attributes-recognition-barrier-0039.bin - ... 100%, 1222 KB, 277 KB/s, 4 seconds passed - - ========== Downloading C:\Users\username\Documents\models\intel\license-plate-recognition-barrier-0001\FP16\license-plate-recognition-barrier-0001.xml - ... 100%, 47 KB, ? KB/s, 0 seconds passed - - ========== Downloading C:\Users\username\Documents\models\intel\license-plate-recognition-barrier-0001\FP16\license-plate-recognition-barrier-0001.bin - ... 100%, 2378 KB, 120 KB/s, 19 seconds passed - - ################|| Post-processing ||################ - -.. tab:: macOS - - .. 
code-block:: sh - - ################|| Downloading models ||################ - - ========== Downloading /Users/username/models/intel/vehicle-license-plate-detection-barrier-0106/FP16/vehicle-license-plate-detection-barrier-0106.xml - ... 100%, 207 KB, 313926 KB/s, 0 seconds passed - - ========== Downloading /Users/username/models/intel/vehicle-license-plate-detection-barrier-0106/FP16/vehicle-license-plate-detection-barrier-0106.bin - ... 100%, 1256 KB, 2552 KB/s, 0 seconds passed - - ========== Downloading /Users/username/models/intel/vehicle-attributes-recognition-barrier-0039/FP16/vehicle-attributes-recognition-barrier-0039.xml - ... 100%, 32 KB, 172042 KB/s, 0 seconds passed - - ========== Downloading /Users/username/models/intel/vehicle-attributes-recognition-barrier-0039/FP16/vehicle-attributes-recognition-barrier-0039.bin - ... 100%, 1222 KB, 2712 KB/s, 0 seconds passed - - ========== Downloading /Users/username/models/intel/license-plate-recognition-barrier-0001/FP16/license-plate-recognition-barrier-0001.xml - ... 100%, 47 KB, 217130 KB/s, 0 seconds passed - - ========== Downloading /Users/username/models/intel/license-plate-recognition-barrier-0001/FP16/license-plate-recognition-barrier-0001.bin - ... 100%, 2378 KB, 4222 KB/s, 0 seconds passed - - ################|| Post-processing ||################ + ========= Replacing text in /Users/username/models/public/googlenet-v1/googlenet-v1.prototxt ========= @endsphinxdirective @@ -358,17 +209,17 @@ Your screen looks similar to this after the download: ## Step 2: Convert the Model with Model Optimizer -In this step, your trained models are ready to run through the Model Optimizer to convert them to the IR (Intermediate Representation) format. For most model types, this is required before using the Inference Engine with the model. +In this step, your trained models are ready to run through the Model Optimizer to convert them to the IR (Intermediate Representation) format. 
For most model types, this is required before using the OpenVINO Runtime with the model. -Models in the IR format always include an `.xml` and `.bin` file and may also include other files such as `.json` or `.mapping`. Make sure you have these files together in a single directory so the Inference Engine can find them. +Models in the IR format always include an `.xml` and `.bin` file and may also include other files such as `.json` or `.mapping`. Make sure you have these files together in a single directory so the OpenVINO Runtime can find them. REQUIRED: `model_name.xml` REQUIRED: `model_name.bin` OPTIONAL: `model_name.json`, `model_name.mapping`, etc. -This tutorial uses the public SqueezeNet 1.1 Caffe* model to run the Image Classification Sample. See the example in the Download Models section of this page to learn how to download this model. +This tutorial uses the public GoogleNet v1 Caffe* model to run the Image Classification Sample. See the example in the Download Models section of this page to learn how to download this model. -The SqueezeNet1.1 model is downloaded in the Caffe* format. You must use the Model Optimizer to convert the model to IR. The `vehicle-license-plate-detection-barrier-0106`, `vehicle-attributes-recognition-barrier-0039`, and `license-plate-recognition-barrier-0001` models are downloaded in IR format. You don't need to use the Model Optimizer on them because they are Intel models that have previously been converted. Public models will need converting with Model Optimizer. +The googlenet-v1 model is downloaded in the Caffe* format. You must use the Model Optimizer to convert the model to IR. Create an `` directory to contain the model's Intermediate Representation (IR). @@ -383,7 +234,7 @@ Create an `` directory to contain the model's Intermediate Representatio .. code-block:: bat - mkdir C:\Users\\Documents\ir + mkdir %USERPROFILE%\Documents\ir .. 
tab:: macOS @@ -393,33 +244,13 @@ Create an `` directory to contain the model's Intermediate Representatio @endsphinxdirective -The Inference Engine can perform inference on different precision formats, such as FP32, FP16, or INT8. To generate an IR with a specific precision, run the Model Optimizer with the appropriate `--data_type` option. +The OpenVINO Runtime can perform inference on different precision formats, such as FP32, FP16, or INT8. To generate an IR with a specific precision, run the Model Optimizer with the appropriate `--data_type` option. Generic Model Optimizer script: -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - cd /opt/intel/openvino/deployment_tools/model_optimizer - python3 mo.py --input_model / --data_type --output_dir - -.. tab:: Windows - - .. code-block:: bat - - cd \deployment_tools\model_optimizer - python mo.py --input_model \ --data_type --output_dir - -.. tab:: macOS - - .. code-block:: sh - - cd /opt/intel/openvino/deployment_tools/model_optimizer - python3 ./mo.py --input_model / --data_type --output_dir - -@endsphinxdirective +``` sh + mo --input_model / --data_type --output_dir +``` IR files produced by the script are written to the directory. @@ -430,22 +261,19 @@ The command with most placeholders filled in and FP16 precision: .. code-block:: sh - cd /opt/intel/openvino/deployment_tools/model_optimizer - python3 mo.py --input_model ~/models/public/squeezenet1.1/squeezenet1.1.caffemodel --data_type FP16 --output_dir ~/ir + mo --input_model ~/models/public/googlenet-v1/googlenet-v1.caffemodel --data_type FP16 --output_dir ~/ir .. tab:: Windows .. code-block:: bat - cd \deployment_tools\model_optimizer - python mo.py --input_model C:\Users\\Documents\models\public\squeezenet1.1\squeezenet1.1.caffemodel --data_type FP16 --output_dir C:\Users\\Documents\ir + mo --input_model %USERPROFILE%\Documents\models\public\googlenet-v1\googlenet-v1.caffemodel --data_type FP16 --output_dir %USERPROFILE%\Documents\ir .. 
tab:: macOS .. code-block:: sh - cd /opt/intel/openvino/deployment_tools/model_optimizer - python3 mo.py --input_model ~/models/public/squeezenet1.1/squeezenet1.1.caffemodel --data_type FP16 --output_dir ~/ir + mo --input_model ~/models/public/googlenet-v1/googlenet-v1.caffemodel --data_type FP16 --output_dir ~/ir @endsphinxdirective @@ -457,32 +285,13 @@ Many sources are available from which you can download video media to use the co - [Google Images](https://images.google.com) As an alternative, the Intel® Distribution of OpenVINO™ toolkit includes several sample images and videos that you can use for running code samples and demo applications: -@sphinxdirective -.. tab:: Linux - - ``/opt/intel/openvino_2021/deployment_tools/demo/car.png`` - - ``/opt/intel/openvino_2021/deployment_tools/demo/car_1.bmp`` - - `Sample images and video `_ - - `Sample videos `_ - -.. tab:: Windows - - - ``\deployment_tools\demo\car.png`` - - ``\deployment_tools\demo\car_1.bmp`` - - `Sample images and video `_ - - `Sample videos `_ - -.. tab:: macOS - - - ``/opt/intel/openvino_2021/deployment_tools/demo/car.png`` - - ``/opt/intel/openvino_2021/deployment_tools/demo/car_1.bmp`` - - `Sample images and video `_ - - `Sample videos `_ - -@endsphinxdirective + - [Sample images and video](https://storage.openvinotoolkit.org/data/test_data/) + - [Sample videos](https://github.com/intel-iot-devkit/sample-videos) ## Step 4: Run Inference on the Sample + ### Run the Image Classification Code Sample To run the **Image Classification** code sample with an input image using the IR model: @@ -493,19 +302,19 @@ To run the **Image Classification** code sample with an input image using the IR .. code-block:: sh - source /opt/intel/openvino/bin/setupvars.sh + source /setupvars.sh .. tab:: Windows .. code-block:: bat - \openvino\bin\setupvars.bat + \setupvars.bat .. tab:: macOS .. 
code-block:: sh - source /opt/intel/openvino/bin/setupvars.sh + source /setupvars.sh @endsphinxdirective @@ -521,7 +330,7 @@ To run the **Image Classification** code sample with an input image using the IR .. code-block:: bat - cd C:\Users\\Documents\Intel\OpenVINO\inference_engine_samples_build\intel64\Release + cd %USERPROFILE%\Documents\Intel\OpenVINO\inference_engine_samples_build\intel64\Release .. tab:: macOS @@ -561,7 +370,7 @@ To run the **Image Classification** code sample with an input image using the IR @endsphinxdirective -The following commands run the Image Classification Code Sample using the `car.png` file from the `demo` directory as an input image, the model in IR format from the `ir` directory, and on different hardware devices: +The following commands run the Image Classification Code Sample using the [`dog.bmp`](https://storage.openvinotoolkit.org/data/test_data/images/224x224/dog.bmp) file as an input image, the model in IR format from the `ir` directory, and on different hardware devices: **CPU:** @sphinxdirective @@ -569,292 +378,86 @@ The following commands run the Image Classification Code Sample using the `car.p .. code-block:: sh - ./classification_sample_async -i /opt/intel/openvino/deployment_tools/demo/car.png -m ~/models/public/squeezenet1.1/ir/squeezenet1.1.xml -d CPU + ./classification_sample_async -i ~/Downloads/dog.bmp -m ~/ir/googlenet-v1.xml -d CPU .. tab:: Windows .. code-block:: bat - .\classification_sample_async.exe -i \deployment_tools\demo\car.png -m C:\Users\\Documents\models\public\squeezenet1.1\ir\squeezenet1.1.xml -d CPU + .\classification_sample_async.exe -i %USERPROFILE%\Downloads\dog.bmp -m %USERPROFILE%\Documents\ir\googlenet-v1.xml -d CPU .. tab:: macOS .. 
code-block:: sh - ./classification_sample_async -i /opt/intel/openvino/deployment_tools/demo/car.png -m ~/models/public/squeezenet1.1/ir/squeezenet1.1.xml -d CPU + ./classification_sample_async -i ~/Downloads/dog.bmp -m ~/ir/googlenet-v1.xml -d CPU @endsphinxdirective **GPU:** - > **NOTE**: Running inference on Intel® Processor Graphics (GPU) requires - [additional hardware configuration steps](https://docs.openvino.ai/latest/_docs_install_guides_installing_openvino_linux.html#additional-GPU-steps), as described earlier on this page. Running on GPU is not compatible with macOS*. + > **NOTE**: Running inference on Intel® Processor Graphics (GPU) requires [additional hardware configuration steps](../install_guides/configurations-for-intel-gpu.md), as described earlier on this page. Running on GPU is not compatible with macOS*. @sphinxdirective .. tab:: Linux .. code-block:: sh - ./classification_sample -i /opt/intel/openvino/deployment_tools/demo/car.png -m ~/models/public/squeezenet1.1/ir/squeezenet1.1.xml -d GPU + ./classification_sample_async -i ~/Downloads/dog.bmp -m ~/ir/googlenet-v1.xml -d GPU .. tab:: Windows .. code-block:: bat - .\classification_sample_async.exe -i \deployment_tools\demo\car.png -m C:\Users\\Documents\models\public\squeezenet1.1\ir\squeezenet1.1.xml -d GPU + .\classification_sample_async.exe -i %USERPROFILE%\Downloads\dog.bmp -m %USERPROFILE%\Documents\ir\googlenet-v1.xml -d GPU @endsphinxdirective **MYRIAD:** - > **NOTE**: Running inference on VPU devices (Intel® Movidius™ Neural Compute - Stick or Intel® Neural Compute Stick 2) with the MYRIAD plugin requires - [additional hardware configuration steps](inference-engine/README.md#optional-additional-installation-steps-for-the-intel-movidius-neural-compute-stick-and-neural-compute-stick-2), as described earlier on this page. 
+ > **NOTE**: Running inference on VPU devices (Intel® Movidius™ Neural Compute Stick or Intel® Neural Compute Stick 2) with the MYRIAD plugin requires [additional hardware configuration steps](../install_guides/configurations-for-ncs2.md), as described earlier on this page. @sphinxdirective .. tab:: Linux .. code-block:: sh - ./classification_sample -i /opt/intel/openvino/deployment_tools/demo/car.png -m ~/models/public/squeezenet1.1/ir/squeezenet1.1.xml -d MYRIAD + ./classification_sample_async -i ~/Downloads/dog.bmp -m ~/ir/googlenet-v1.xml -d MYRIAD .. tab:: Windows .. code-block:: bat - .\classification_sample_async.exe -i \deployment_tools\demo\car.png -m C:\Users\\Documents\models\public\squeezenet1.1\ir\squeezenet1.1.xml -d MYRIAD + .\classification_sample_async.exe -i %USERPROFILE%\Downloads\dog.bmp -m %USERPROFILE%\Documents\ir\googlenet-v1.xml -d MYRIAD .. tab:: macOS .. code-block:: sh - ./classification_sample -i /opt/intel/openvino/deployment_tools/demo/car.png -m ~/models/public/squeezenet1.1/ir/squeezenet1.1.xml -d MYRIAD + ./classification_sample_async -i ~/Downloads/dog.bmp -m ~/ir/googlenet-v1.xml -d MYRIAD @endsphinxdirective When the sample application is complete, you see the label and confidence for the top 10 categories on the display. Below is a sample output with inference results on CPU: @sphinxdirective -.. tab:: Linux .. 
code-block:: sh - Top 10 results: - - Image /opt/intel/deployment-tools/demo/car.png - - classid probability label - ------- ----------- ----- - 817 0.8363345 sports car, sport car - 511 0.0946488 convertible - 479 0.0419131 car wheel - 751 0.0091071 racer, race car, racing car - 436 0.0068161 beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon - 656 0.0037564 minivan - 586 0.0025741 half track - 717 0.0016069 pickup, pickup truck - 864 0.0012027 tow truck, tow car, wrecker - 581 0.0005882 grille, radiator grille - - [ INFO ] Execution successful - - [ INFO ] This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool - -.. tab:: Windows - - .. code-block:: bat - Top 10 results: - Image C:\Program Files (x86)\Intel\openvino_2021\deployment_tools\demo\car.png + Image dog.bmp classid probability label ------- ----------- ----- - 817 0.8363345 sports car, sport car - 511 0.0946488 convertible - 479 0.0419131 car wheel - 751 0.0091071 racer, race car, racing car - 436 0.0068161 beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon - 656 0.0037564 minivan - 586 0.0025741 half track - 717 0.0016069 pickup, pickup truck - 864 0.0012027 tow truck, tow car, wrecker - 581 0.0005882 grille, radiator grille - - [ INFO ] Execution successful - - [ INFO ] This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool - -.. tab:: macOS - - .. 
code-block:: sh - - Top 10 results: - - Image /opt/intel/deployment-tools/demo/car.png - - classid probability label - ------- ----------- ----- - 817 0.8363345 sports car, sport car - 511 0.0946488 convertible - 479 0.0419131 car wheel - 751 0.0091071 racer, race car, racing car - 436 0.0068161 beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon - 656 0.0037564 minivan - 586 0.0025741 half track - 717 0.0016069 pickup, pickup truck - 864 0.0012027 tow truck, tow car, wrecker - 581 0.0005882 grille, radiator grille - - [ INFO ] Execution successful - - [ INFO ] This sample is an API example, for any performance measurements please use the dedicated benchmark_app tool - -@endsphinxdirective - -@sphinxdirective -.. raw:: html - -
- -@endsphinxdirective - -### Run the Security Barrier Camera Demo Application - -To run the **Security Barrier Camera Demo Application** using an input image on the prepared IR models: - -1. Set up the OpenVINO environment variables: -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - source /opt/intel/openvino/bin/setupvars.sh - -.. tab:: Windows - - .. code-block:: bat - - \bin\setupvars.bat - -.. tab:: macOS - - .. code-block:: sh - - source /opt/intel/openvino/bin/setupvars.sh - -@endsphinxdirective - -2. Go to the demo application build directory: -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - cd ~/inference_engine_demos_build/intel64/Release - -.. tab:: Windows - - .. code-block:: bat - - cd C:\Users\\Documents\Intel\OpenVINO\inference_engine_demos_build\intel64\Release - -.. tab:: macOS - - .. code-block:: sh - - cd ~/inference_engine_demos_build/intel64/Release - -@endsphinxdirective - -3. Run the demo executable, specifying the input media file, list of model IRs, and a target device for performing inference: -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - ./security_barrier_camera_demo -i -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d - -.. tab:: Windows - - .. code-block:: bat - - .\security_barrier_camera_demo.exe -i -m -m_va -m_lpr -d - -.. tab:: macOS - - .. code-block:: sh - - ./security_barrier_camera_demo -i -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d - -@endsphinxdirective - -@sphinxdirective -.. raw:: html - -
- -@endsphinxdirective - -**CPU:** -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - ./security_barrier_camera_demo -i /opt/intel/openvino/deployment_tools/demo/car_1.bmp -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d CPU - -.. tab:: Windows - - .. code-block:: bat - - .\security_barrier_camera_demo.exe -i \deployment_tools\demo\car_1.bmp -m C:\Users\username\Documents\models\intel\vehicle-license-plate-detection-barrier-0106\FP16\vehicle-license-plate-detection-barrier-0106.xml -m_va C:\Users\username\Documents\models\intel\vehicle-attributes-recognition-barrier-0039\FP16\vehicle-attributes-recognition-barrier-0039.xml -m_lpr C:\Users\username\Documents\models\intel\license-plate-recognition-barrier-0001\FP16\license-plate-recognition-barrier-0001.xml -d CPU - -.. tab:: macOS - - .. code-block:: sh - - ./security_barrier_camera_demo -i /opt/intel/openvino/deployment_tools/demo/car_1.bmp -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d CPU - -@endsphinxdirective - -**GPU:** -> **NOTE**: Running inference on Intel® Processor Graphics (GPU) requires [additional hardware configuration steps](https://docs.openvino.ai/latest/_docs_install_guides_installing_openvino_linux.html#additional-GPU-steps), as described earlier on this page. Running on GPU is not compatible with macOS*. - -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - ./security_barrier_camera_demo -i /opt/intel/openvino/deployment_tools/demo/car_1.bmp -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d GPU - -.. tab:: Windows - - .. 
code-block:: bat - - .\security_barrier_camera_demo.exe -i \deployment_tools\demo\car_1.bmp -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d GPU - -.. tab:: macOS - - .. code-block:: sh - - ./security_barrier_camera_demo -i /opt/intel/openvino/deployment_tools/demo/car_1.bmp -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d GPU - -@endsphinxdirective - -**MYRIAD:** -> **NOTE**: Running inference on VPU devices (Intel® Movidius™ Neural Compute Stick or Intel® Neural Compute Stick 2) with the MYRIAD plugin requires [additional hardware configuration steps](https://docs.openvino.ai/latest/_docs_install_guides_installing_openvino_linux.html#additional-NCS-steps), as described earlier on this page. - -@sphinxdirective -.. tab:: Linux - - .. code-block:: sh - - ./security_barrier_camera_demo -i /opt/intel/openvino/deployment_tools/demo/car_1.bmp -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d MYRIAD - -.. tab:: Windows - - .. 
code-block:: bat - - .\security_barrier_camera_demo.exe -i \deployment_tools\demo\car_1.bmp -m /vehicle-license-plate-detection-barrier-0106.xml -m_va /vehicle-attributes-recognition-barrier-0039.xml -m_lpr /license-plate-recognition-barrier-0001.xml -d MYRIAD + 156 0.6875963 Blenheim spaniel + 215 0.0868125 Brittany spaniel + 218 0.0784114 Welsh springer spaniel + 212 0.0597296 English setter + 217 0.0212105 English springer, English springer spaniel + 219 0.0194193 cocker spaniel, English cocker spaniel, cocker + 247 0.0086272 Saint Bernard, St Bernard + 157 0.0058511 papillon + 216 0.0057589 clumber, clumber spaniel + 154 0.0052615 Pekinese, Pekingese, Peke @endsphinxdirective diff --git a/docs/glossary.md b/docs/glossary.md index d5b17d7af8c..a0b9d690e5b 100644 --- a/docs/glossary.md +++ b/docs/glossary.md @@ -56,21 +56,21 @@ Glossary of terms used in the OpenVINO™ -| Term | Description | -| :--- | :--- | -| Batch | Number of images to analyze during one call of infer. Maximum batch size is a property of the network and it is set before loading of the network to the plugin. In NHWC, NCHW and NCDHW image data layout representation, the N refers to the number of images in the batch | -| Tensor | Memory container used for storing inputs, outputs of the network, weights and biases of the layers | -| Device (Affinitity) | A preferred Intel(R) hardware device to run the inference (CPU, GPU, etc.) | -| Extensibility mechanism, Custom layers | The mechanism that provides you with capabilities to extend the OpenVINO™ Runtime and Model Optimizer so that they can work with topologies containing layers that are not yet supported | -| ov::Model | A class of the Model that OpenVINO™ Runtime reads from IR. 
Consists of topology, weights and biases | -| ov::CompiledModel | An instance of the loaded network which allows the OpenVINO™ Runtime to request (several) infer requests and perform inference synchronously or asynchronously | -| InferRequest | A class that represents the end point of inference on the model loaded to the plugin and represented by executable network. Inputs are set here, outputs should be requested from this interface as well | -| ov::ProfileInfo | Represents basic inference profiling information per layer | -| OpenVINO™ Runtime | A C++ library with a set of classes that you can use in your application to infer input data (images) and get the result | -| OpenVINO™ API | The basic default API for all supported devices, which allows you to load a model from Intermediate Representation, set input and output formats and execute the model on various devices | -| OpenVINO™ Core | OpenVINO™ Core is a software component that manages inference on certain Intel(R) hardware devices: CPU, GPU, MYRIAD, GNA, etc. | -| ov::Layout | Image data layout refers to the representation of images batch. Layout shows a sequence of 4D or 5D tensor data in memory. A typical NCHW format represents pixel in horizontal direction, rows by vertical dimension, planes by channel and images into batch | -| ov::element::Type | Represents data element type. For example, f32 is 32-bit floating point, f16 is 16-bit floating point. Element type can be changed before loading the network to the plugin | +| Term | Description | +| :--- |:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Batch | Number of images to analyze during one call of infer. 
Maximum batch size is a property of the model and it is set before compiling of the model by the device. In NHWC, NCHW and NCDHW image data layout representation, the N refers to the number of images in the batch | +| Tensor | Memory container used for storing inputs, outputs of the model, weights and biases of the operations | +| Device (Affinity) | A preferred Intel(R) hardware device to run the inference (CPU, GPU, GNA, etc.) | +| Extensibility mechanism, Custom layers | The mechanism that provides you with capabilities to extend the OpenVINO™ Runtime and Model Optimizer so that they can work with models containing operations that are not yet supported | +| ov::Model | A class of the Model that OpenVINO™ Runtime reads from IR or converts from ONNX, PaddlePaddle formats. Consists of model structure, weights and biases | +| ov::CompiledModel | An instance of the compiled model which allows the OpenVINO™ Runtime to request (several) infer requests and perform inference synchronously or asynchronously | +| ov::InferRequest | A class that represents the end point of inference on the model compiled by the device and represented by a compiled model. Inputs are set here, outputs should be requested from this interface as well | +| ov::ProfilingInfo | Represents basic inference profiling information per operation | +| OpenVINO™ Runtime | A C++ library with a set of classes that you can use in your application to infer input tensors and get the results | +| OpenVINO™ API | The basic default API for all supported devices, which allows you to load a model from Intermediate Representation or convert from ONNX, PaddlePaddle file formats, set input and output formats and execute the model on various devices | +| OpenVINO™ Core | OpenVINO™ Core is a software component that manages inference on certain Intel(R) hardware devices: CPU, GPU, MYRIAD, GNA, etc. | +| ov::Layout | Image data layout refers to the representation of images batch. 
Layout shows a sequence of 4D or 5D tensor data in memory. A typical NCHW format represents pixel in horizontal direction, rows by vertical dimension, planes by channel and images into batch. See also [Layout API Overview](./OV_Runtime_UG/layout_overview.md) | +| ov::element::Type | Represents data element type. For example, f32 is 32-bit floating point, f16 is 16-bit floating point. | ## See Also diff --git a/docs/how_tos/how-to-links.md b/docs/how_tos/how-to-links.md index 598c3e6e2c8..e808efa1ef9 100644 --- a/docs/how_tos/how-to-links.md +++ b/docs/how_tos/how-to-links.md @@ -13,7 +13,7 @@ * [Accelerate Vision-based AI with Intel® Distribution of OpenVINO™ Toolkit](https://www.intel.ai/accelerate-vision-based-ai-with-intel-distribution-of-openvino-toolkit/) ## Custom Operations Guide -To learn about what is *custom operation* and how to work with them in the Deep Learning Deployment Toolkit, see the [Custom Operations Guide](../HOWTO/Custom_Layers_Guide.md). +To learn what *custom operations* are and how to work with them in the Deep Learning Deployment Toolkit, see the [Custom Operations Guide](../Extensibility_UG/Intro.md). 
## Introducing OpenVINO™ and Computer Vision | IoT Developer Show Season 2 | Intel Software diff --git a/docs/img/BATCH_device.PNG b/docs/img/BATCH_device.PNG new file mode 100644 index 00000000000..97245cef282 --- /dev/null +++ b/docs/img/BATCH_device.PNG @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1461f042894cd61c2812f12ffa566e1723fdd16a1ee8398321e58d309143475 +size 123115 diff --git a/docs/img/badge_logo.svg b/docs/img/badge_logo.svg deleted file mode 100644 index af18a46ebdb..00000000000 --- a/docs/img/badge_logo.svg +++ /dev/null @@ -1 +0,0 @@ -launchlaunchbinderbinder \ No newline at end of file diff --git a/docs/img/caching_enabled.png b/docs/img/caching_enabled.png index f8a898764e1..2cc080c118b 100644 --- a/docs/img/caching_enabled.png +++ b/docs/img/caching_enabled.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:488a7a47e5086a6868c22219bc9d58a3508059e5a1dc470f2653a12552dea82f -size 36207 +oid sha256:ecf560b08b921da29d59a3c1f6332d092a0575dd00cf59806dc801c32a10790f +size 120241 diff --git a/docs/img/caching_times.png b/docs/img/caching_times.png index 11d9c8b088f..fa67a63f3fc 100644 --- a/docs/img/caching_times.png +++ b/docs/img/caching_times.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2eed189f9cb3d30fe13b4ba4515edd4e6da5d01545660e65fa8a33d945967281 -size 28894 +oid sha256:357483dd3460848e98489073cd9d58b5c8ada9ec3df4fbfd0956ba9e779f9c15 +size 79843 diff --git a/docs/img/conv_depth_01.png b/docs/img/conv_depth_01.png deleted file mode 100644 index 516b01d6d1b..00000000000 --- a/docs/img/conv_depth_01.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:80edd1da1c5673d18afa44bc2c0503ba9ecdcc37c2acb94960303b61c602ceee -size 12649 diff --git a/docs/img/conv_simple_01.png b/docs/img/conv_simple_01.png deleted file mode 100644 index 6de6f46e36e..00000000000 --- a/docs/img/conv_simple_01.png +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:d3e8856aa175d6fcf940af57a53f962ff6c58acf0a3838bfccc6a093bff1756d -size 9015 diff --git a/docs/img/conv_sum_relu_01.png b/docs/img/conv_sum_relu_01.png deleted file mode 100644 index 7007115294f..00000000000 --- a/docs/img/conv_sum_relu_01.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7d53ce33f180cf4d170bbeb69635ee7c49a67d3f6ee8b1c01ec12568fe1cca38 -size 17157 diff --git a/docs/img/fullyconnected_activation_01.png b/docs/img/fullyconnected_activation_01.png deleted file mode 100644 index 776b14b46fe..00000000000 --- a/docs/img/fullyconnected_activation_01.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:88745fd132531e943d59afe59ed6af8eaae6b62ba1fda2493dfef76080d31a25 -size 7788 diff --git a/docs/img/group_convolutions_01.png b/docs/img/group_convolutions_01.png deleted file mode 100644 index 237523823c3..00000000000 --- a/docs/img/group_convolutions_01.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9709bc83f903943b4d737d379babf80a391a72ad8eab98e71abcc0de5424fbfc -size 12361 diff --git a/docs/img/mkldnn_conv_sum.png b/docs/img/mkldnn_conv_sum.png deleted file mode 100644 index d1c56f77128..00000000000 --- a/docs/img/mkldnn_conv_sum.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:af2641e8e685b027123681ab542162932b008eff257ef5b7105950bfe8b4ade8 -size 10373 diff --git a/docs/img/mkldnn_conv_sum_result.png b/docs/img/mkldnn_conv_sum_result.png deleted file mode 100644 index 67dc87cd326..00000000000 --- a/docs/img/mkldnn_conv_sum_result.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:02efdda675c16def7c2705e978964ce8bf65d1ec6cedfdb0a5afc837fb57abf0 -size 5660 diff --git a/docs/img/mkldnn_group_conv.png b/docs/img/mkldnn_group_conv.png deleted file mode 100644 index c433a6b5484..00000000000 --- 
a/docs/img/mkldnn_group_conv.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e69242d80da7676311e20e5db67c01bd6562008ecf3a53df8fdedaefabb91b70 -size 7226 diff --git a/docs/img/nncf_workflow.png b/docs/img/nncf_workflow.png new file mode 100644 index 00000000000..53f3cc334e0 --- /dev/null +++ b/docs/img/nncf_workflow.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7a58f31b2043fe9d92892b1f40ed8a7c596c36ef9d1cd1c71adb981009161bf +size 45665 diff --git a/docs/img/pooling_fakequant_01.png b/docs/img/pooling_fakequant_01.png deleted file mode 100644 index 2310488df40..00000000000 --- a/docs/img/pooling_fakequant_01.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:37c7908d2379cc2ba1909965c58de7bc55d131a330c47e173321c718846d6745 -size 7809 diff --git a/docs/img/tf_openvino.png b/docs/img/tf_openvino.png new file mode 100644 index 00000000000..21a62a5f22d --- /dev/null +++ b/docs/img/tf_openvino.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21e4d7d0c2de5de20bee14196641e8efacd62ef732a8bfa765e7928d78f611a0 +size 86782 diff --git a/docs/index.rst b/docs/index.rst index 46b71ca485d..5aa299039ae 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -73,12 +73,12 @@ OpenVINO™ Documentation

Learn about the alternative, web-based version of OpenVINO. DL Workbench container installation Required.

-

Inference Engine

-

Learn about OpenVINO's inference mechanism which executes the IR and ONNX models on target devices.

+

OpenVINO™ Runtime

+

Learn about OpenVINO's inference mechanism which executes the IR, ONNX, Paddle models on target devices.

Tune & Optimize

-

Use quantization, pruning, and sparsity algorithms to make your application as efficient as possible.

+

Model-level (e.g. quantization) and Runtime (i.e. application) -level optimizations to make your inference as fast as possible.

Performance
Benchmarks

diff --git a/docs/install_guides/configurations-for-intel-gna.md b/docs/install_guides/configurations-for-intel-gna.md new file mode 100644 index 00000000000..6b8692ce5ae --- /dev/null +++ b/docs/install_guides/configurations-for-intel-gna.md @@ -0,0 +1,30 @@ +# Configurations for Intel® Gaussian & Neural Accelerator (GNA) with Intel® Distribution of OpenVINO™ toolkit {#openvino_docs_install_guides_configurations_for_intel_gna} + +This page introduces additional configurations for Intel® Gaussian & Neural Accelerator (GNA) with Intel® Distribution of OpenVINO™ toolkit on Linux and Windows. + +> **NOTE**: On platforms where Intel® GNA is not enabled in the BIOS, the driver cannot be installed, so the GNA plugin uses the software emulation mode only. + +### Drivers and Dependencies + +Intel® GNA hardware requires a driver to be installed on the system. + +@sphinxdirective + +.. _gna guide: + +@endsphinxdirective + +## Linux + +[Download Intel® GNA driver for Ubuntu Linux 18.04.3 LTS (with HWE Kernel version 5.4+)](https://storage.openvinotoolkit.org/drivers/gna/) + +@sphinxdirective + +.. _gna guide windows: + +@endsphinxdirective + +## Windows + +Intel® GNA driver for Windows is available through Windows Update\* + diff --git a/docs/install_guides/configurations-for-ncs2.md b/docs/install_guides/configurations-for-ncs2.md index 6b843d666ff..199c0560799 100644 --- a/docs/install_guides/configurations-for-ncs2.md +++ b/docs/install_guides/configurations-for-ncs2.md @@ -6,6 +6,8 @@ @endsphinxdirective +This page introduces additional configurations for Intel® Neural Compute Stick 2 with Intel® Distribution of OpenVINO™ toolkit on Linux, Raspbian OS and macOS. 
+ ## Linux Once you have your Intel® Distribution of OpenVINO™ toolkit installed, follow the steps to be able to work on NCS2: @@ -83,9 +85,9 @@ Proceed to the [Get Started Guide](@ref get_started) section to learn the basic These steps are required only if you want to perform inference on Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X VPU. -To perform inference on Intel® Neural Compute Stick 2, the `libusb` library is required. You can build it from the [source code](https://github.com/libusb/libusb) or install using the macOS package manager you prefer: [Homebrew*](https://brew.sh/), [MacPorts*](https://www.macports.org/) or other. +To perform inference on Intel® Neural Compute Stick 2, the `libusb` library is required. You can build it from the [source code](https://github.com/libusb/libusb) or install using the macOS package manager you prefer: [Homebrew](https://brew.sh/), [MacPorts](https://www.macports.org/) or other. -For example, to install the `libusb` library using Homebrew\*, use the following command: +For example, to install the `libusb` library using Homebrew, use the following command: ```sh brew install libusb ``` diff --git a/docs/install_guides/deployment-manager-tool.md b/docs/install_guides/deployment-manager-tool.md index 6963b7f35c6..23e1ecebbae 100644 --- a/docs/install_guides/deployment-manager-tool.md +++ b/docs/install_guides/deployment-manager-tool.md @@ -18,6 +18,8 @@ The Deployment Manager is a Python* command-line tool that creates a deployment > **IMPORTANT**: The operating system on the target system must be the same as the development system on which you are creating the package. For example, if the target system is Ubuntu 18.04, the deployment package must be created from the OpenVINO™ toolkit installed on Ubuntu 18.04. 
+> **TIP**: If your application requires additional dependencies, including the Microsoft Visual C++ Redistributable, use the ['--user_data' option](https://docs.openvino.ai/latest/openvino_docs_install_guides_deployment_manager_tool.html#run-standard-cli-mode) to add them to the deployment archive. Install these dependencies on the target host before running inference. + ## Create Deployment Package Using Deployment Manager There are two ways to create a deployment package that includes inference-related components of the OpenVINO™ toolkit: you can run the Deployment Manager tool in either interactive or standard CLI mode. @@ -155,7 +157,7 @@ The script successfully completes, and the deployment package is generated in th After the Deployment Manager has successfully completed, you can find the generated `.tar.gz` (for Linux or macOS) or `.zip` (for Windows) package in the output directory you specified. -To deploy the Inference Engine components from the development machine to the target system, perform the following steps: +To deploy the OpenVINO Runtime components from the development machine to the target system, perform the following steps: 1. Copy the generated archive to the target system using your preferred method. @@ -221,4 +223,4 @@ The package is unpacked to the destination directory and the following files and @endsphinxdirective -You have now finished the deployment of the Inference Engine components to the target system. +You have now finished the deployment of the OpenVINO Runtime components to the target system. 
diff --git a/docs/install_guides/installing-model-dev-tools.md b/docs/install_guides/installing-model-dev-tools.md index c6adf47c102..77300869886 100644 --- a/docs/install_guides/installing-model-dev-tools.md +++ b/docs/install_guides/installing-model-dev-tools.md @@ -8,7 +8,88 @@ If you want to download, convert, optimize and tune pre-trained deep learning mo * Post-Training Optimization Tool * Model Downloader and other Open Model Zoo tools -From the 2022.1 release, OpenVINO Development Tools can only be installed via PyPI. +> **NOTE**: From the 2022.1 release, the OpenVINO™ Development Tools can only be installed via PyPI. + +## For Python Developers + +If you are a Python developer, you can find the main steps below to install OpenVINO Development Tools. For more details, see . + +While installing OpenVINO Development Tools, OpenVINO Runtime will also be installed as a dependency, so you don't need to install OpenVINO Runtime separately. + +### Step 1. Set Up Python Virtual Environment + +To avoid dependency conflicts, use a virtual environment. Skip this step only if you do want to install all dependencies globally. + +Use the following command to create a virtual environment: + +@sphinxdirective + +.. tab:: Linux and macOS + + .. code-block:: sh + + python3 -m venv openvino_env + +.. tab:: Windows + + .. code-block:: sh + + python -m venv openvino_env + + +@endsphinxdirective + + +### Step 2. Activate Virtual Environment + +@sphinxdirective + +.. tab:: Linux and macOS + + .. code-block:: sh + + source openvino_env/bin/activate + +.. tab:: Windows + + .. code-block:: sh + + openvino_env\Scripts\activate + + +@endsphinxdirective + + +### Step 3. Set Up and Update PIP to the Highest Version + +Use the following command: +```sh +python -m pip install --upgrade pip +``` + +### Step 4. 
Install the Package + +To install and configure the components of the development package for working with specific frameworks, use the following command: +``` +pip install openvino-dev[extras] +``` +where the `extras` parameter specifies one or more deep learning frameworks via these values: `caffe`, `kaldi`, `mxnet`, `onnx`, `pytorch`, `tensorflow`, `tensorflow2`. Make sure that you install the corresponding frameworks for your models. + +For example, to install and configure the components for working with TensorFlow 2.x and ONNX, use the following command: +``` +pip install openvino-dev[tensorflow2,onnx] +``` + +> **NOTE**: For TensorFlow, use the `tensorflow2` value as much as possible. The `tensorflow` value is provided only for compatibility reasons. + + +### Step 5. Verify the Installation + +To verify if the package is properly installed, run the command below (this may take a few seconds): +```sh +mo -h +``` +You will see the help message for Model Optimizer if installation finished successfully. ## For C++ Developers @@ -43,27 +124,24 @@ where the EXTRAS parameter specifies one or more deep learning frameworks via th If you have installed OpenVINO Runtime via the installer, to avoid version conflicts, specify your version in the command. For example: ``` -pip install openvino-dev[tensorflow2,mxnet,caffe]==2022.1 +pip install openvino-dev[tensorflow2,onnx]==2022.1 ``` > **NOTE**: For TensorFlow, use the `tensorflow2` value as much as possible. The `tensorflow` value is provided only for compatibility reasons. For more details, see . - -## For Python Developers +## What's Next? -You can use the following command to install the latest package version available in the index: -``` -pip install openvino-dev[EXTRAS] -``` -where the EXTRAS parameter specifies one or more deep learning frameworks via these values: `caffe`, `kaldi`, `mxnet`, `onnx`, `pytorch`, `tensorflow`, `tensorflow2`. Make sure that you install the corresponding frameworks for your models. 
+Now you may continue with the following tasks: -For example, to install and configure the components for working with TensorFlow 2.x, MXNet and Caffe, use the following command: -``` -pip install openvino-dev[tensorflow2,mxnet,caffe] -``` +* To convert models for use with OpenVINO, see [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). +* See pre-trained deep learning models in our [Open Model Zoo](../model_zoo.md). +* Try out OpenVINO via [OpenVINO Notebooks](https://docs.openvino.ai/latest/notebooks/notebooks.html). +* To write your own OpenVINO™ applications, see [OpenVINO Runtime User Guide](../OV_Runtime_UG/openvino_intro.md). +* See sample applications in [OpenVINO™ Toolkit Samples Overview](../OV_Runtime_UG/Samples_Overview.md). -> **NOTE**: For TensorFlow, use the `tensorflow2` value as much as possible. The `tensorflow` value is provided only for compatibility reasons. +## Additional Resources -For more details, see . +- Intel® Distribution of OpenVINO™ toolkit home page: +- For IoT Libraries & Code Samples, see [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit). diff --git a/docs/install_guides/installing-openvino-apt.md b/docs/install_guides/installing-openvino-apt.md index e46b34804c9..4e93c3bf09b 100644 --- a/docs/install_guides/installing-openvino-apt.md +++ b/docs/install_guides/installing-openvino-apt.md @@ -1,164 +1,178 @@ -# Install Intel® Distribution of OpenVINO™ toolkit for Linux* Using APT Repository {#openvino_docs_install_guides_installing_openvino_apt} +# Install Intel® Distribution of OpenVINO™ Toolkit for Linux Using APT Repository {#openvino_docs_install_guides_installing_openvino_apt} -This guide provides installation steps for Intel® Distribution of OpenVINO™ toolkit for Linux* distributed through the APT repository. +This guide provides installation steps for Intel® Distribution of OpenVINO™ toolkit for Linux distributed through the APT repository. 
-> **IMPORTANT**: By downloading and using this container and the included software, you agree to the terms and conditions of the [software license agreements](https://software.intel.com/content/dam/develop/external/us/en/documents/intel-openvino-license-agreements.pdf). Please, review the content inside the `/licensing` folder for more details. +> **NOTE**: From the 2022.1 release, the OpenVINO™ Development Tools can only be installed via PyPI. If you want to develop or optimize your models with OpenVINO, see [Install OpenVINO Development Tools](installing-model-dev-tools.md) for detailed steps. -> **NOTE**: Intel® Graphics Compute Runtime for OpenCL™ is not a part of OpenVINO™ APT distribution. You can install it from the [Intel® Graphics Compute Runtime for OpenCL™ GitHub repo](https://github.com/intel/compute-runtime). +> **IMPORTANT**: By downloading and using this container and the included software, you agree to the terms and conditions of the [software license agreements](https://software.intel.com/content/dam/develop/external/us/en/documents/intel-openvino-license-agreements.pdf). Please review the content inside the `/licensing` folder for more details. ## System Requirements -The complete list of supported hardware is available in the [Release Notes](https://software.intel.com/content/www/us/en/develop/articles/openvino-relnotes.html#inpage-nav-8). +The complete list of supported hardware is available in the [Release Notes](https://software.intel.com/content/www/us/en/develop/articles/openvino-relnotes.html). 
**Operating Systems** -- Ubuntu 18.04.x long-term support (LTS), 64-bit -- Ubuntu 20.04.0 long-term support (LTS), 64-bit +- Ubuntu 18.04 long-term support (LTS), 64-bit +- Ubuntu 20.04 long-term support (LTS), 64-bit -## Included with Runtime Package +## Install OpenVINO Runtime -The following components are installed with the OpenVINO runtime package: +### Step 1: Set Up the OpenVINO Toolkit APT Repository -| Component | Description| -|-----------|------------| -| [OpenVINO™ Runtime](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md)| The engine that runs a deep learning model. It includes a set of libraries for an easy inference integration into your applications. | -| [OpenCV*](https://docs.opencv.org/master/) | OpenCV* community version compiled for Intel® hardware. | -| Deep Learning Streamer (DL Streamer) | Streaming analytics framework, based on GStreamer, for constructing graphs of media analytics components. For the DL Streamer documentation, see [DL Streamer Samples](@ref gst_samples_README), [API Reference](https://openvinotoolkit.github.io/dlstreamer_gst/), [Elements](https://github.com/openvinotoolkit/dlstreamer_gst/wiki/Elements), [Tutorial](https://github.com/openvinotoolkit/dlstreamer_gst/wiki/DL-Streamer-Tutorial). | +1. Install the GPG key for the repository -## Included with Developer Package + a. Download the [GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB](https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB). You can also use the following command: + ```sh + wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + ``` + b. Add this key to the system keyring: + ```sh + sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + ``` + > **NOTE**: You might need to install GnuPG: `sudo apt-get install gnupg` -The following components are installed with the OpenVINO developer package: +2. 
Add the repository via the following command: + @sphinxdirective -| Component | Description| -|-----------|------------| -| [Model Optimizer](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) | This tool imports, converts, and optimizes models that were trained in popular frameworks to a format usable by Intel tools, especially the Inference Engine. 
Popular frameworks include Caffe\*, TensorFlow\*, MXNet\*, and ONNX\*. | -| [OpenVINO™ Runtime](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) | The engine that runs a deep learning model. It includes a set of libraries for an easy inference integration into your applications.| -| [OpenCV*](https://docs.opencv.org/master/) | OpenCV\* community version compiled for Intel® hardware | -| [Sample Applications](../OV_Runtime_UG/Samples_Overview.md) | A set of simple console applications demonstrating how to use the Inference Engine in your applications. | -| [Demo Applications](@ref omz_demos) | A set of console applications that demonstrate how you can use the Inference Engine in your applications to solve specific use cases. | -| Additional Tools | A set of tools to work with your models including [Accuracy Checker utility](@ref omz_tools_accuracy_checker), [Post-Training Optimization Tool Guide](@ref pot_README), [Model Downloader](@ref omz_tools_downloader) and other | -| [Documentation for Pre-Trained Models ](@ref omz_models_group_intel) | Documentation for the pre-trained models available in the [Open Model Zoo repo](https://github.com/openvinotoolkit/open_model_zoo). | -| Deep Learning Streamer (DL Streamer) | Streaming analytics framework, based on GStreamer\*, for constructing graphs of media analytics components. For the DL Streamer documentation, see [DL Streamer Samples](@ref gst_samples_README), [API Reference](https://openvinotoolkit.github.io/dlstreamer_gst/), [Elements](https://github.com/openvinotoolkit/dlstreamer_gst/wiki/Elements), [Tutorial](https://github.com/openvinotoolkit/dlstreamer_gst/wiki/DL-Streamer-Tutorial). | + .. tab:: On Ubuntu 18 + + .. code-block:: sh + + echo "deb https://apt.repos.intel.com/openvino/2022 bionic main" | sudo tee /etc/apt/sources.list.d/intel-openvino-2022.list + + .. tab:: On Ubuntu 20 + + .. 
code-block:: sh + + echo "deb https://apt.repos.intel.com/openvino/2022 focal main" | sudo tee /etc/apt/sources.list.d/intel-openvino-2022.list + + @endsphinxdirective -## Install Packages +3. Update the list of packages via the update command: + ```sh + sudo apt update + ``` -### Set up the OpenVINO™ Toolkit APT Repository +4. Verify that the APT repository is properly set up. Run the apt-cache command to see a list of all available OpenVINO packages and components: + ```sh + apt-cache search openvino + ``` -#### Install the GPG key for the Repository -1. Download the public key from [https://apt.repos.intel.com/openvino/2021/GPG-PUB-KEY-INTEL-OPENVINO-2021](https://apt.repos.intel.com/openvino/2021/GPG-PUB-KEY-INTEL-OPENVINO-2021) and save it to a file. -2. Add this key to the system keyring: -```sh -sudo apt-key add -``` -> **NOTE**: You might need to install GnuPG: `sudo apt-get install gnupg` +### Step 2: Install OpenVINO Runtime Using the APT Package Manager -3. Check the list of APT keys running the following command: -```sh -sudo apt-key list -``` +OpenVINO will be installed in: `/opt/intel/openvino_..` + +A symlink will be created: `/opt/intel/openvino_` -#### Add the Repository +#### To Install the Latest Version Run the following command: ```sh -echo "deb https://apt.repos.intel.com/openvino/2021 all main" | sudo tee /etc/apt/sources.list.d/intel-openvino-2021.list +sudo apt install openvino ``` -#### Update the List of Packages - -Run the `update` command: -```sh -sudo apt update -``` - -#### Verify that the APT Repository is Properly Set Up - -Run the `apt-cache` command to see a list of all available OpenVINO packages and components: -```sh -apt-cache search openvino -``` -See the example commands below: - -* **Runtime Packages** - - On Ubuntu 18.04: - ```sh - sudo apt-cache search intel-openvino-runtime-ubuntu18 - ``` - On Ubuntu 20.04: - ```sh - sudo apt-cache search intel-openvino-runtime-ubuntu20 - ``` -* **Developer Packages** - - On Ubuntu 
18.04: - ```sh - sudo apt-cache search intel-openvino-dev-ubuntu18 - ``` - On Ubuntu 20.04: - ```sh - sudo apt-cache search intel-openvino-dev-ubuntu20 - ``` - -### Install Runtime or Developer Packages using the APT Package Manager -Intel® OpenVINO™ Toolkit will be installed in: `/opt/intel/openvino_..` - -A symlink will be created: `/opt/intel/openvino_` - #### To Install a Specific Version -1. Get a list of OpenVINO packages available for installation: -```sh -sudo apt-cache search intel-openvino-runtime-ubuntu18 -``` -2. Install a specific version of an OpenVINO package: -```sh -sudo apt install intel-openvino--ubuntu-.. -``` -See the example commands below: -* **Runtime Package**
- On Ubuntu 18.04: - ```sh - sudo apt install intel-openvino-runtime-ubuntu18-2021.1.105 - ``` - On Ubuntu 20.04: - ```sh - sudo apt install intel-openvino-runtime-ubuntu20-2021.1.105 - ``` -* **Developer Package**
- On Ubuntu 18.04: - ```sh - sudo apt install intel-openvino-dev-ubuntu18-2021.1.105 - ``` - On Ubuntu 20.04: - ```sh - sudo apt install intel-openvino-dev-ubuntu20-2021.1.105 - ``` -#### To check for Installed Packages and Versions +1. Get a list of OpenVINO packages available for installation: + ```sh + sudo apt-cache search openvino + ``` +2. Install a specific version of an OpenVINO package: + ```sh + sudo apt install openvino-.. + ``` + For example: + ```sh + sudo apt install openvino-2022.1.0 + ``` -To get a list of installed OpenVINO packages: +#### To Check for Installed Packages and Versions +Run the following command: ```sh apt list --installed | grep openvino ``` -#### To Uninstall a Specific Version +#### To Uninstall the Latest Version -To uninstall a specific package: +Run the following command: ```sh -sudo apt autoremove intel-openvino--ubuntu-.. +sudo apt autoremove openvino ``` +#### To Uninstall a Specific Version -**Additional Resources** +Run the following command: +```sh +sudo apt autoremove openvino-.. +``` -- Intel® Distribution of OpenVINO™ toolkit home page: [https://software.intel.com/en-us/openvino-toolkit](https://software.intel.com/en-us/openvino-toolkit). -- OpenVINO™ toolkit online documentation: [https://docs.openvino.ai](https://docs.openvino.ai). -- [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). -- [OpenVINO™ Runtime User Guide](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md). -- For more information on Sample Applications, see the [Inference Engine Samples Overview](../OV_Runtime_UG/Samples_Overview.md). +### Step 3 (Optional): Install OpenCV from APT + +OpenCV is necessary to run C++ demos from Open Model Zoo. Some OpenVINO samples can also extend their capabilities when compiled with OpenCV as a dependency. 
OpenVINO provides a package to install OpenCV from APT: + +#### To Install the Latest Version of OpenCV + +Run the following command: +```sh +sudo apt install openvino-opencv +``` + +#### To Install a Specific Version of OpenCV + +Run the following command: +```sh +sudo apt install openvino-opencv-.. +``` + +### Step 4 (Optional): Install Software Dependencies + +After you have installed OpenVINO Runtime, if you decided to [install OpenVINO Development Tools](installing-model-dev-tools.md), make sure that you install external software dependencies first. + +Refer to
Install External Software Dependencies for detailed steps. + + +### Step 5 (Optional): Configure Inference on Non-CPU Devices + +@sphinxdirective + +.. tab:: GNA + + To enable the toolkit components to use Intel® Gaussian & Neural Accelerator (GNA) on your system, follow the steps in :ref:`GNA Setup Guide `. + +.. tab:: GPU + + To enable the toolkit components to use processor graphics (GPU) on your system, follow the steps in :ref:`GPU Setup Guide `. + +.. tab:: NCS 2 + + To perform inference on Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X VPU, follow the steps on :ref:`NCS2 Setup Guide `. + + +.. tab:: VPU + + To install and configure your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, see the :ref:`VPU Configuration Guide `. + After configuration is done, you are ready to run the verification scripts with the HDDL Plugin for your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. + + .. warning:: + While working with either HDDL or NCS, choose one of them as they cannot run simultaneously on the same machine. + +@endsphinxdirective + +## What's Next? + +Now you may continue with the following tasks: + +* To convert models for use with OpenVINO, see [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). +* See pre-trained deep learning models in our [Open Model Zoo](../model_zoo.md). +* Try out OpenVINO via [OpenVINO Notebooks](https://docs.openvino.ai/latest/notebooks/notebooks.html). +* To write your own OpenVINO™ applications, see [OpenVINO Runtime User Guide](../OV_Runtime_UG/openvino_intro.md). +* See sample applications in [OpenVINO™ Toolkit Samples Overview](../OV_Runtime_UG/Samples_Overview.md). + +## Additional Resources + +- Intel® Distribution of OpenVINO™ toolkit home page: . - For IoT Libraries & Code Samples see the [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit). 
- diff --git a/docs/install_guides/installing-openvino-conda.md b/docs/install_guides/installing-openvino-conda.md index 76bb49956ed..fe0d5299afa 100644 --- a/docs/install_guides/installing-openvino-conda.md +++ b/docs/install_guides/installing-openvino-conda.md @@ -1,57 +1,37 @@ -# Install Intel® Distribution of OpenVINO™ toolkit from Anaconda* Cloud {#openvino_docs_install_guides_installing_openvino_conda} +# Install Intel® Distribution of OpenVINO™ toolkit from Anaconda Cloud {#openvino_docs_install_guides_installing_openvino_conda} -This guide provides installation steps for Intel® Distribution of OpenVINO™ toolkit distributed through the Anaconda* Cloud. +This guide provides installation steps for Intel® Distribution of OpenVINO™ toolkit for Linux distributed through the Anaconda Cloud. -> **NOTE**: Only runtime packages are available from Anaconda* Cloud. - -## Introduction - -OpenVINO™ toolkit is a comprehensive toolkit for quickly developing applications and solutions that solve a variety of tasks including emulation of human vision, automatic speech recognition, natural language processing, recommendation systems, and many others. Based on latest generations of artificial neural networks, including Convolutional Neural Networks (CNNs), recurrent and attention-based networks, the toolkit extends computer vision and non-vision workloads across Intel® hardware, maximizing performance. It accelerates applications with high-performance, AI and deep learning inference deployed from edge to cloud. 
- -The Intel® Distribution of OpenVINO™ toolkit\*: -- Enables CNN-based deep learning inference on the edge -- Supports heterogeneous execution across Intel® CPU, Intel® Integrated Graphics, Intel® Neural Compute Stick 2, and Intel® Vision Accelerator Design with Intel® Movidius™ VPUs -- Speeds time-to-market via an easy-to-use library of computer vision functions and pre-optimized kernels -The **runtime package** includes the following components installed by default: - -| Component | Description | -|-----------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [OpenVINO™ Runtime](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) | This is the engine that runs the deep learning model. It includes a set of libraries for an easy inference integration into your applications. | +> **NOTE**: From the 2022.1 release, the OpenVINO™ Development Tools can only be installed via PyPI. If you want to develop or optimize your models with OpenVINO, see [Install OpenVINO Development Tools](installing-model-dev-tools.md) for detailed steps. 
## System Requirements **Software** - - [Anaconda* distribution](https://www.anaconda.com/products/individual/) + - [Anaconda distribution](https://www.anaconda.com/products/individual/) **Operating Systems** -| Supported Operating System | [Python* Version (64-bit)](https://www.python.org/) | +| Supported Operating System | [Python Version (64-bit)](https://www.python.org/) | | :------------------------------------------------------------| :---------------------------------------------------| -| Ubuntu* 18.04 long-term support (LTS), 64-bit | 3.6, 3.7 | -| Ubuntu* 20.04 long-term support (LTS), 64-bit | 3.6, 3.7 | -| CentOS* 7.6, 64-bit | 3.6, 3.7 | -| macOS* 10.15.x | 3.6, 3.7 | -| Windows 10*, 64-bit | 3.6, 3.7 | +| Ubuntu 18.04 long-term support (LTS), 64-bit | 3.6, 3.7, 3.8, 3.9 | +| Ubuntu 20.04 long-term support (LTS), 64-bit | 3.6, 3.7, 3.8, 3.9 | +| Red Hat Enterprise Linux 8, 64-bit | 3.6, 3.7, 3.8, 3.9 | +| macOS 10.15 | 3.6, 3.7, 3.8, 3.9 | +| Windows 10, 64-bit | 3.6, 3.7, 3.8, 3.9 | -## Install the Runtime Package using the Anaconda* Package Manager +## Install OpenVINO Runtime Using the Anaconda Package Manager -1. Set up the Anaconda* environment:  +1. Set up the Anaconda environment (taking Python 3.7 for example):  ```sh conda create --name py37 python=3.7 - ``` - ```sh conda activate py37 ``` 2. Update Anaconda environment to the latest version: ```sh conda update --all ``` -3. Install pre-requisites: - ```sh - conda install numpy - ``` -4. Install the Intel® Distribution of OpenVINO™ Toolkit: +3. 
Install the Intel® Distribution of OpenVINO™ toolkit: - Ubuntu* 20.04 ```sh conda install openvino-ie4py-ubuntu20 -c intel @@ -60,28 +40,35 @@ The **runtime package** includes the following components installed by default: ```sh conda install openvino-ie4py-ubuntu18 -c intel ``` - - CentOS* 7.6 + - Red Hat Enterprise Linux 8, 64-bit ```sh - conda install openvino-ie4py-centos7 -c intel + conda install openvino-ie4py-rhel8 -c intel ``` - - Windows* 10 and macOS* + - Windows 10 and macOS ```sh conda install openvino-ie4py -c intel ``` -5. Verify the package is installed: +4. Verify the package is installed: ```sh - python -c "from openvino.inference_engine import IECore" + python -c "from openvino.runtime import Core" ``` If installation was successful, you will not see any error messages (no console output). Now you can start developing your application. + +## What's Next? + +Now you may continue with the following tasks: + +* To convert models for use with OpenVINO, see [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). +* See pre-trained deep learning models in our [Open Model Zoo](../model_zoo.md). +* Try out OpenVINO via [OpenVINO Notebooks](https://docs.openvino.ai/latest/notebooks/notebooks.html). +* To write your own OpenVINO™ applications, see [OpenVINO Runtime User Guide](../OV_Runtime_UG/openvino_intro.md). +* See sample applications in [OpenVINO™ Toolkit Samples Overview](../OV_Runtime_UG/Samples_Overview.md). + ## Additional Resources -- Intel® Distribution of OpenVINO™ toolkit home page: [https://software.intel.com/en-us/openvino-toolkit](https://software.intel.com/en-us/openvino-toolkit). -- OpenVINO™ toolkit online documentation: [https://docs.openvino.ai](https://docs.openvino.ai). -- [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). -- [OpenVINO™ Runtime User Guide](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md). 
-- For more information on Sample Applications, see the [Inference Engine Samples Overview](../OV_Runtime_UG/Samples_Overview.md). -- Intel® Distribution of OpenVINO™ toolkit Anaconda* home page: [https://anaconda.org/intel/openvino-ie4py](https://anaconda.org/intel/openvino-ie4py) - +- Intel® Distribution of OpenVINO™ toolkit home page: . +- For IoT Libraries & Code Samples see the [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit). +- Intel® Distribution of OpenVINO™ toolkit Anaconda home page: [https://anaconda.org/intel/openvino-ie4py](https://anaconda.org/intel/openvino-ie4py) diff --git a/docs/install_guides/installing-openvino-config-ivad-vpu.md b/docs/install_guides/installing-openvino-config-ivad-vpu.md index 9c66416150d..c0cd93c8e8c 100644 --- a/docs/install_guides/installing-openvino-config-ivad-vpu.md +++ b/docs/install_guides/installing-openvino-config-ivad-vpu.md @@ -2,6 +2,8 @@ @sphinxdirective +.. _vpu guide: + .. toctree:: :maxdepth: 2 :hidden: @@ -11,7 +13,7 @@ @endsphinxdirective -The steps in this guide are only required if you want to perform inference on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. +The steps in this guide are only required if you want to perform inference on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs with OpenVINO™ on Linux or Windows. For troubleshooting issues, please see the [Troubleshooting Guide](troubleshooting.md) for more information. @@ -19,7 +21,7 @@ For troubleshooting issues, please see the [Troubleshooting Guide](troubleshooti For Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, the following additional installation steps are required. -> **NOTE**: If you installed the Intel® Distribution of OpenVINO™ to the non-default install directory, replace `/opt/intel` with the directory in which you installed the software. 
+> **NOTE**: If you installed the Intel® Distribution of OpenVINO™ toolkit to the non-default install directory, replace `/opt/intel` with the directory in which you installed the software. 1. Set the environment variables: ```sh @@ -44,13 +46,19 @@ Now the dependencies are installed and you are ready to use the Intel® Vision A For advanced configuration steps for your **IEI Mustang-V100-MX8-R10** accelerator, see [Configurations for IEI Mustang-V100-MX8-R10 card](movidius-setup-guide.md). **IEI Mustang-V100-MX8-R11** accelerator doesn't require any additional steps. +@sphinxdirective + +.. _vpu guide windows: + +@endsphinxdirective + ## Windows To enable inference on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, the following additional installation steps are required: 1. Download and install Visual C++ Redistributable for Visual Studio 2017 2. Check with a support engineer if your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs card requires SMBUS connection to PCIe slot (most unlikely). Install the SMBUS driver only if confirmed (by default, it's not required): - 1. Go to the `\runtime\3rdparty\hddl\drivers\SMBusDriver` directory, where `` is the directory in which the Intel Distribution of OpenVINO toolkit is installed. + 1. Go to the `\runtime\3rdparty\hddl\drivers\SMBusDriver` directory, where `` is the directory in which the Intel® Distribution of OpenVINO™ toolkit is installed. 2. Right click on the `hddlsmbus.inf` file and choose **Install** from the pop up menu. You are done installing your device driver and are ready to use your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. 
diff --git a/docs/install_guides/installing-openvino-docker-linux.md b/docs/install_guides/installing-openvino-docker-linux.md index 3ea708abbd3..a5421507fbe 100644 --- a/docs/install_guides/installing-openvino-docker-linux.md +++ b/docs/install_guides/installing-openvino-docker-linux.md @@ -1,68 +1,71 @@ -# Install Intel® Distribution of OpenVINO™ toolkit for Linux* from a Docker* Image {#openvino_docs_install_guides_installing_openvino_docker_linux} +# Install Intel® Distribution of OpenVINO™ toolkit for Linux from a Docker Image {#openvino_docs_install_guides_installing_openvino_docker_linux} -The Intel® Distribution of OpenVINO™ toolkit quickly deploys applications and solutions that emulate human vision. Based on Convolutional Neural Networks (CNN), the toolkit extends computer vision (CV) workloads across Intel® hardware, maximizing performance. The Intel® Distribution of OpenVINO™ toolkit includes the Intel® Deep Learning Deployment Toolkit. +This guide provides steps on creating a Docker image with Intel® Distribution of OpenVINO™ toolkit for Linux and using the image on different devices. -This guide provides device specifics for a Docker* image creation with Intel® Distribution of OpenVINO™ toolkit for Linux* and its further usage. +## System Requirements -## System Requirements +@sphinxdirective +.. 
tab:: Target Operating Systems with Python Version + + +----------------------------------------------+--------------------------+ + | Operating System | Supported Python Version | + +==============================================+==========================+ + | Ubuntu 18.04 long-term support (LTS), 64-bit | 3.6 | + +----------------------------------------------+--------------------------+ + | Ubuntu 20.04 long-term support (LTS), 64-bit | 3.8 | + +----------------------------------------------+--------------------------+ + | Red Hat Enterprise Linux 8, 64-bit | 3.6 | + +----------------------------------------------+--------------------------+ -**Target Operating Systems** +.. tab:: Host Operating Systems -- Ubuntu\* 18.04 long-term support (LTS), 64-bit -- Ubuntu\* 20.04 long-term support (LTS), 64-bit -- CentOS\* 7 -- Red Hat\* Enterprise Linux* 8 (64 bit) + * Linux + * Windows Subsystem for Linux 2 (WSL2) on CPU or GPU + * macOS on CPU only + + To launch a Linux image on WSL2 when trying to run inferences on a GPU, make sure that the following requirements are met: -**Host Operating Systems** + - Only Windows 10 with 21H2 update or above installed and Windows 11 are supported. + - Intel GPU driver on Windows host with version 30.0.100.9684 or above needs to be installed. Please see :ref:`this article ` for more details. + - From 2022.1 release, the Docker images contain the preinstalled recommended version of OpenCL Runtime with WSL2 support. -- Linux +@endsphinxdirective -## Prebuilt images ## Installation Flow -Prebuilt images are available on: +There are two ways to install OpenVINO with Docker. You can choose either of them according to your needs: +* Use a prebuilt image. Do the following steps: + 1. Get a prebuilt image from provided sources. + 2. Run the image on different devices. To run inferences on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, configure the Docker image first before you run the image. + 3. 
(Optional) Run samples in the Docker image. +* If you want to customize your image, you can also build a Docker image manually by using the following steps: + 1. Prepare a Dockerfile. + 2. Configure the Docker image. + 3. Run the image on different devices. + 4. (Optional) Run samples in the Docker image. + +## Getting a Prebuilt Image from Provided Sources + +You can find prebuilt images on: - [Docker Hub](https://hub.docker.com/u/openvino) -- [Red Hat* Quay.io](https://quay.io/organization/openvino) -- [Red Hat* Ecosystem Catalog](https://catalog.redhat.com/software/containers/intel/openvino-runtime/606ff4d7ecb5241699188fb3) +- [Red Hat Quay.io](https://quay.io/organization/openvino) +- [Red Hat Ecosystem Catalog (runtime image)](https://catalog.redhat.com/software/containers/intel/openvino-runtime/606ff4d7ecb5241699188fb3) +- [Red Hat Ecosystem Catalog (development image)](https://catalog.redhat.com/software/containers/intel/openvino-dev/613a450dc9bc35f21dc4a1f7) +- [Azure Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/intel_corporation.openvino) -## Build a Docker* Image +## Preparing a Dockerfile -You can use [available Dockerfiles](https://github.com/openvinotoolkit/docker_ci/tree/master/dockerfiles) or generate a Dockerfile with your setting via [DockerHub CI Framework](https://github.com/openvinotoolkit/docker_ci). -The Framework can generate a Dockerfile, build, test, and deploy an image with the Intel® Distribution of OpenVINO™ toolkit. -You can also try our [Tutorials](https://github.com/openvinotoolkit/docker_ci/tree/master/docs/tutorials) which demonstrate the usage of Docker containers with Intel® Distribution of OpenVINO™ toolkit. You can find device specific steps to configure an Intel® Distribution of OpenVINO™ toolkit Dockerfile below. 
+You can use the [available Dockerfiles on GitHub](https://github.com/openvinotoolkit/docker_ci/tree/master/dockerfiles) or generate a Dockerfile with your settings via [DockerHub CI Framework](https://github.com/openvinotoolkit/docker_ci) which can generate a Dockerfile, build, test and deploy an image with the Intel® Distribution of OpenVINO™ toolkit. +You can also try our [Tutorials](https://github.com/openvinotoolkit/docker_ci/tree/master/docs/tutorials) which demonstrate the usage of Docker containers with OpenVINO. -## Use Docker* Image for CPU +## Configuring the Image for Different Devices -- Kernel reports the same information for all containers as for native application, for example, CPU, memory information. -- All instructions that are available to host process available for process in container, including, for example, AVX2, AVX512. No restrictions. -- Docker\* does not use virtualization or emulation. The process in Docker* is just a regular Linux process, but it is isolated from external world on kernel level. Performance penalty is small. +If you want to run inferences on a CPU or Intel® Neural Compute Stick 2, no extra configuration is needed. Go to Running the image on different devices for the next step. -### Configure a Docker* Image for CPU +### Configuring Docker Image for GPU -You don't need to do specific steps to configure an Intel® Distribution of OpenVINO™ toolkit Dockerfile for CPU. You can use [available Dockerfiles](https://github.com/openvinotoolkit/docker_ci/tree/master/dockerfiles) or generate a Dockerfile with your setting via [DockerHub CI Framework](https://github.com/openvinotoolkit/docker_ci). - -### Run the Docker* Image for CPU - -Run the image with the following command: - -```sh -docker run -it --rm -``` - -## Use a Docker* Image for GPU - -### Configure a Docker* Image for GPU - -> **NOTE**: Only Intel® integrated graphics are supported. 
- -**Prerequisites:** - -- GPU is not available in container by default, you must attach it to the container. -- Kernel driver must be installed on the host. -- Intel® OpenCL™ runtime package must be included into the container. -- In the container, non-root user must be in the `video` and `render` groups. To add a user to the render group, follow the [Configuration Guide for the Intel® Graphics Compute Runtime for OpenCL™ on Ubuntu* 20.04](https://github.com/openvinotoolkit/docker_ci/blob/master/configure_gpu_ubuntu20.md). - -To configure a OpenVINO Docker* image with access to GPU, add the following commands to a Dockerfile: +By default, the distributed Docker image for OpenVINO has the recommended version of Intel® Graphics Compute Runtime for oneAPI Level Zero and OpenCL Driver for the operating system installed inside. If you want to build an image with a custom version of OpenCL Runtime included, you need to modify the Dockerfile using the lines below (the 19.41.14441 version is used as an example) and build the image manually: **Ubuntu 18.04/20.04**: @@ -84,22 +87,7 @@ RUN apt-get update && \ rm /tmp/opencl ``` -or you can use the installation script `install_NEO_OCL_driver.sh` if you previously installed OpenVINO in the Dockerfile, where `INTEL_OPENCL` is the variable to store the default version of Intel® Graphics Compute Runtime for OpenCL™ Driver: - -```sh -WORKDIR /tmp/opencl -RUN useradd -ms /bin/bash -G video,users openvino && \ - chown openvino -R /home/openvino - -# Please use `20.35.17767` for 10th generation Intel® Core™ processor (formerly Ice Lake) or 11th generation Intel® Core™ processor (formerly Tiger Lake) -ARG INTEL_OPENCL=19.41.14441 - -WORKDIR ${INTEL_OPENVINO_DIR}/install_dependencies -RUN ./install_NEO_OCL_driver.sh --no_numa -y --install_driver ${INTEL_OPENCL} && \ - rm -rf /var/lib/apt/lists/* -``` - -**CentOS 7/RHEL 8**: +**RHEL 8**: ```sh WORKDIR /tmp/opencl @@ -108,7 +96,7 @@ RUN useradd -ms /bin/bash -G video,users openvino && \ 
RUN groupmod -g 44 video RUN yum update -y && yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \ - yum update -y && yum install -y ocl-icd ocl-icd-devel && \ + yum update -y && yum install -y ocl-icd ocl-icd-devel && \ yum clean all && rm -rf /var/cache/yum && \ curl -L https://sourceforge.net/projects/intel-compute-runtime/files/19.41.14441/centos-7/intel-gmmlib-19.3.2-1.el7.x86_64.rpm/download -o intel-gmmlib-19.3.2-1.el7.x86_64.rpm && \ curl -L https://sourceforge.net/projects/intel-compute-runtime/files/19.41.14441/centos-7/intel-gmmlib-devel-19.3.2-1.el7.x86_64.rpm/download -o intel-gmmlib-devel-19.3.2-1.el7.x86_64.rpm && \ @@ -122,245 +110,142 @@ RUN yum update -y && yum install -y https://dl.fedoraproject.org/pub/epel/epel-r yum remove -y epel-release ``` -or you can use the installation script `install_NEO_OCL_driver.sh` if you previously installed OpenVINO in the Dockerfile, where `INTEL_OPENCL` is the variable to store the default version of Intel® Graphics Compute Runtime for OpenCL™ Driver: +### Configuring Docker Image for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs -```sh -WORKDIR /tmp/opencl -RUN useradd -ms /bin/bash -G video,users openvino && \ - chown openvino -R /home/openvino -RUN groupmod -g 44 video +> **NOTE**: When building the Docker image, create a user in the Dockerfile that has the same UID (User Identifier) and GID (Group Identifier) as the user that runs hddldaemon on the host, and then run the application in the Docker image with this user. This step is necessary to run the container as a non-root user. 
-# Please use `20.35.17767` for 10th generation Intel® Core™ processor (formerly Ice Lake) or 11th generation Intel® Core™ processor (formerly Tiger Lake) -ARG INTEL_OPENCL=19.41.14441 +To use the Docker container for inference on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, do the following steps: -WORKDIR ${INTEL_OPENVINO_DIR}/install_dependencies -RUN ./install_NEO_OCL_driver.sh --no_numa -y --install_driver ${INTEL_OPENCL} && \ - yum clean all && rm -rf /var/cache/yum && \ - yum remove -y epel-release +1. Set up the environment on the host machine to be used for running Docker. It is required to execute `hddldaemon`, which is responsible for communication between the HDDL plugin and the board. To learn how to set up the environment (the OpenVINO package or HDDL package must be pre-installed), see [Configuration guide for HDDL device](https://github.com/openvinotoolkit/docker_ci/blob/master/install_guide_vpu_hddl.md) or [Configurations for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs on Linux](installing-openvino-config-ivad-vpu.md). +2. Run `hddldaemon` on the host in a separate terminal session using the following command: + ```sh + $HDDL_INSTALL_DIR/hddldaemon + ``` + +## Running the Docker Image on Different Devices + +### Running the Image on CPU + +Run the Docker image with the following command: ``` +docker run -it --rm ``` -### Run the Docker* Image for GPU +Note the following things: + +- Kernel reports the same information for all containers as for a native application, for example, CPU, memory information. +- All instructions that are available to the host process are available to the process in the container, including, for example, AVX2, AVX512. No restrictions. +- Docker does not use virtualization or emulation. The process in Docker is just a regular Linux process, but it is isolated from the external world at the kernel level. Performance loss is minor. 
+ + +### Running the Image on GPU + +> **NOTE**: Only Intel® integrated graphics are supported. + +Note the following things: + +- GPU is not available in the container by default. You must attach it to the container. +- Kernel driver must be installed on the host. +- In the container, non-root user must be in the `video` and `render` groups. To add a user to the render group, follow the [Configuration Guide for the Intel® Graphics Compute Runtime for OpenCL™ on Ubuntu 20.04](https://github.com/openvinotoolkit/docker_ci/blob/master/configure_gpu_ubuntu20.md). To make GPU available in the container, attach the GPU to the container using `--device /dev/dri` option and run the container: -```sh -docker run -it --rm --device /dev/dri -``` -> **NOTE**: If your host system is Ubuntu 20, follow the [Configuration Guide for the Intel® Graphics Compute Runtime for OpenCL™ on Ubuntu* 20.04](https://github.com/openvinotoolkit/docker_ci/blob/master/configure_gpu_ubuntu20.md). +* Ubuntu 18 or RHEL 8: + ```sh + docker run -it --rm --device /dev/dri + ``` + > **NOTE**: If your host system is Ubuntu 20, follow the [Configuration Guide for the Intel® Graphics Compute Runtime for OpenCL™ on Ubuntu* 20.04](https://github.com/openvinotoolkit/docker_ci/blob/master/configure_gpu_ubuntu20.md). -## Use a Docker* Image for Intel® Neural Compute Stick 2 +* WSL2: + ```sh + docker run -it --rm --device /dev/dxg --volume /usr/lib/wsl:/usr/lib/wsl + ``` + > **NOTE**: To launch a Linux image on WSL2, make sure that the additional requirements in System Requirements are met. -### Configure and Run the Docker* Image for Intel® Neural Compute Stick 2 -**Known limitations:** +### Running the Image on Intel® Neural Compute Stick 2 -- Intel® Neural Compute Stick 2 device changes its VendorID and DeviceID during execution and each time looks for a host system as a brand new device. It means it cannot be mounted as usual. 
-- UDEV events are not forwarded to the container by default it does not know about device reconnection. -- Only one device per host is supported. - -Use one of the following options as **Possible solutions for Intel® Neural Compute Stick 2:** - -#### Option 1 - -1. Get rid of UDEV by rebuilding `libusb` without UDEV support in the Docker* image (add the following commands to a `Dockerfile`): - - **Ubuntu 18.04/20.04**: -```sh -ARG BUILD_DEPENDENCIES="autoconf \ - automake \ - build-essential \ - libtool \ - unzip \ - udev" -RUN apt-get update && \ - apt-get install -y --no-install-recommends ${BUILD_DEPENDENCIES} && \ - rm -rf /var/lib/apt/lists/* - -WORKDIR /opt -RUN curl -L https://github.com/libusb/libusb/archive/v1.0.22.zip --output v1.0.22.zip && \ - unzip v1.0.22.zip - -WORKDIR /opt/libusb-1.0.22 -RUN ./bootstrap.sh && \ - ./configure --disable-udev --enable-shared && \ - make -j4 - -WORKDIR /opt/libusb-1.0.22/libusb -RUN /bin/mkdir -p '/usr/local/lib' && \ - /bin/bash ../libtool --mode=install /usr/bin/install -c libusb-1.0.la '/usr/local/lib' && \ - /bin/mkdir -p '/usr/local/include/libusb-1.0' && \ - /usr/bin/install -c -m 644 libusb.h '/usr/local/include/libusb-1.0' && \ - /bin/mkdir -p '/usr/local/lib/pkgconfig' - -WORKDIR /opt/libusb-1.0.22/ -RUN /usr/bin/install -c -m 644 libusb-1.0.pc '/usr/local/lib/pkgconfig' && \ - cp /opt/intel/openvino_2022/runtime/3rdparty/97-myriad-usbboot.rules /etc/udev/rules.d/ && \ - ldconfig -``` - - **CentOS 7**: -```sh -ARG BUILD_DEPENDENCIES="autoconf \ - automake \ - libtool \ - unzip \ - udev" - -RUN yum update -y && yum install -y ${BUILD_DEPENDENCIES} && \ - yum group install -y "Development Tools" && \ - yum clean all && rm -rf /var/cache/yum - -WORKDIR /opt -RUN curl -L https://github.com/libusb/libusb/archive/v1.0.22.zip --output v1.0.22.zip && \ - unzip v1.0.22.zip && rm -rf v1.0.22.zip - -WORKDIR /opt/libusb-1.0.22 -RUN ./bootstrap.sh && \ - ./configure --disable-udev --enable-shared && \ - make -j4 - -WORKDIR 
/opt/libusb-1.0.22/libusb -RUN /bin/mkdir -p '/usr/local/lib' && \ - /bin/bash ../libtool --mode=install /usr/bin/install -c libusb-1.0.la '/usr/local/lib' && \ - /bin/mkdir -p '/usr/local/include/libusb-1.0' && \ - /usr/bin/install -c -m 644 libusb.h '/usr/local/include/libusb-1.0' && \ - /bin/mkdir -p '/usr/local/lib/pkgconfig' && \ - printf "\nexport LD_LIBRARY_PATH=\${LD_LIBRARY_PATH}:/usr/local/lib\n" >> /opt/intel/openvino_2022/setupvars.sh - -WORKDIR /opt/libusb-1.0.22/ -RUN /usr/bin/install -c -m 644 libusb-1.0.pc '/usr/local/lib/pkgconfig' && \ - cp /opt/intel/openvino_2022/runtime/3rdparty/97-myriad-usbboot.rules /etc/udev/rules.d/ && \ - ldconfig -``` -2. Run the Docker* image: +Run the Docker image with the following command: ```sh docker run -it --rm --device-cgroup-rule='c 189:* rmw' -v /dev/bus/usb:/dev/bus/usb ``` -#### Option 2 -Run container in the privileged mode, enable the Docker network configuration as host, and mount all devices to the container: +If the command above does not work, you can also run the container in the privileged mode, enable the Docker network configuration as host, and mount all devices to the container. Run the following command: ```sh docker run -it --rm --privileged -v /dev:/dev --network=host ``` -> **NOTES**: -> -> - It is not secure. -> - Conflicts with Kubernetes* and other tools that use orchestration and private networks may occur. -## Use a Docker* Image for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs +> **NOTE**: This option is not recommended, as conflicts with Kubernetes and other tools that use orchestration and private networks may occur. Please use it with caution and only for troubleshooting purposes. -### Configure Docker* Image for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs -To use the Docker container for inference on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs: +#### Known Limitations -1. 
Set up the environment on the host machine, that is going to be used for running Docker*. -It is required to execute `hddldaemon`, which is responsible for communication between the HDDL plugin and the board. -To learn how to set up the environment (the OpenVINO package or HDDL package must be pre-installed), see [Configuration guide for HDDL device](https://github.com/openvinotoolkit/docker_ci/blob/master/install_guide_vpu_hddl.md) or [Configuration Guide for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs](installing-openvino-config-ivad-vpu.md). -2. Prepare the Docker* image (add the following commands to a Dockerfile). - - **Ubuntu 18.04**: -```sh -WORKDIR /tmp -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - libboost-filesystem1.65-dev \ - libboost-thread1.65-dev \ - libjson-c3 libxxf86vm-dev && \ - rm -rf /var/lib/apt/lists/* && rm -rf /tmp/* -``` - - **Ubuntu 20.04**: -```sh -WORKDIR /tmp -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - libboost-filesystem-dev \ - libboost-thread-dev \ - libjson-c4 \ - libxxf86vm-dev && \ - rm -rf /var/lib/apt/lists/* && rm -rf /tmp/* -``` - - **CentOS 7**: -```sh -WORKDIR /tmp -RUN yum update -y && yum install -y \ - boost-filesystem \ - boost-thread \ - boost-program-options \ - boost-system \ - boost-chrono \ - boost-date-time \ - boost-regex \ - boost-atomic \ - json-c \ - libXxf86vm-devel && \ - yum clean all && rm -rf /var/cache/yum -``` -3. Run `hddldaemon` on the host in a separate terminal session using the following command: -```sh -$HDDL_INSTALL_DIR/hddldaemon -``` +- Intel® Neural Compute Stick 2 device changes its VendorID and DeviceID during execution and each time looks for a host system as a brand new device. It means it cannot be mounted as usual. +- UDEV events are not forwarded to the container by default, and it does not know about the device reconnection. 
The prebuilt Docker images and provided Dockerfiles include `libusb` rebuilt without UDEV support. +- Only one NCS2 device connected to the host can be used when running inference in a container. -### Run the Docker* Image for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs -To run the built Docker* image for Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, use the following command: +### Running the Image on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs +> **NOTE**: To run inferences on Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, make sure that you have configured the Docker image first. + +Use the following command: ```sh docker run -it --rm --device=/dev/ion:/dev/ion -v /var/tmp:/var/tmp ``` -> **NOTES**: -> -> - The device `/dev/ion` needs to be shared to be able to use ion buffers among the plugin, `hddldaemon` and the kernel. -> - Since separate inference tasks share the same HDDL service communication interface (the service creates mutexes and a socket file in `/var/tmp`), `/var/tmp` needs to be mounted and shared among them. - -In some cases, the ion driver is not enabled (for example, due to a newer kernel version or iommu (Input-Output Memory Management Unit) incompatibility). `lsmod | grep myd_ion` returns empty output. To resolve, use the following command: - +If your application runs inference of a network with a big size (>4MB) of input/output, the HDDL plugin will use shared memory. In this case, you must mount `/dev/shm` as volume: ```sh -docker run -it --rm --net=host -v /var/tmp:/var/tmp –-ipc=host +docker run -it --rm --device=/dev/ion:/dev/ion -v /var/tmp:/var/tmp -v /dev/shm:/dev/shm ``` -> **NOTES**: -> -> - When building Docker images, create a user in the Dockerfile that has the same UID(User Identifier) and GID(Group Identifier) as the user which runs hddldaemon on the host. -> - Run the application in the Docker image with this user. 
-> - Alternatively, you can start hddldaemon with the root user on host, but this approach is not recommended. +Note the following things: +* The device `/dev/ion` needs to be shared to be able to use ion buffers among the plugin, `hddldaemon` and the kernel. +* Since separate inference tasks share the same HDDL service communication interface (the service creates mutexes and a socket file in `/var/tmp`), `/var/tmp` needs to be mounted and shared among them. -### Run Demos in the Docker* Image -To run the Classification Demo Using SqueezeNet on a specific inference device, run the following commands with the root privileges (additional third-party dependencies will be installed): +#### If the ion Driver is Not Enabled + +In some cases, the ion driver is not enabled (for example, due to a newer kernel version or iommu (Input-Output Memory Management Unit) incompatibility). `lsmod | grep myd_ion` returns empty output. To resolve this issue, use the following command: +```sh +docker run -it --rm --ipc=host --net=host -v /var/tmp:/var/tmp +``` +If that still does not solve the issue, try starting `hddldaemon` with the root user on host. However, this approach is not recommended. Please use with caution. 
+ + +## Running Samples in Docker Image + +To run the `Hello Classification Sample` on a specific inference device, run the following commands: **CPU**: ```sh -docker run -itu root:root --rm -/bin/bash -c "apt update && apt install sudo && samples/scripts/run_sample_squeezenet.sh -d CPU" +docker run -it --rm +/bin/bash -c "cd ~ && omz_downloader --name googlenet-v1 --precisions FP16 && omz_converter --name googlenet-v1 --precision FP16 && curl -O https://storage.openvinotoolkit.org/data/test_data/images/car_1.bmp && python3 /opt/intel/openvino/samples/python/hello_classification/hello_classification.py public/googlenet-v1/FP16/googlenet-v1.xml car_1.bmp CPU" ``` **GPU**: ```sh -docker run -itu root:root --rm --device /dev/dri:/dev/dri -/bin/bash -c "apt update && apt install sudo && samples/scripts/run_sample_squeezenet.sh -d GPU" +docker run -itu root:root --rm --device /dev/dri:/dev/dri +/bin/bash -c "omz_downloader --name googlenet-v1 --precisions FP16 && omz_converter --name googlenet-v1 --precision FP16 && curl -O https://storage.openvinotoolkit.org/data/test_data/images/car_1.bmp && python3 samples/python/hello_classification/hello_classification.py public/googlenet-v1/FP16/googlenet-v1.xml car_1.bmp GPU" ``` **MYRIAD**: ```sh docker run -itu root:root --rm --device-cgroup-rule='c 189:* rmw' -v /dev/bus/usb:/dev/bus/usb -/bin/bash -c "apt update && apt install sudo && samples/scripts/run_sample_squeezenet.sh -d MYRIAD" +/bin/bash -c "omz_downloader --name googlenet-v1 --precisions FP16 && omz_converter --name googlenet-v1 --precision FP16 && curl -O https://storage.openvinotoolkit.org/data/test_data/images/car_1.bmp && python3 samples/python/hello_classification/hello_classification.py public/googlenet-v1/FP16/googlenet-v1.xml car_1.bmp MYRIAD" ``` **HDDL**: ```sh -docker run -itu root:root --rm --device=/dev/ion:/dev/ion -v /var/tmp:/var/tmp -/bin/bash -c "apt update && apt install sudo && samples/scripts/run_sample_squeezenet.sh -d HDDL" +docker run -itu 
root:root --rm --device=/dev/ion:/dev/ion -v /var/tmp:/var/tmp -v /dev/shm:/dev/shm +/bin/bash -c "omz_downloader --name googlenet-v1 --precisions FP16 && omz_converter --name googlenet-v1 --precision FP16 && curl -O https://storage.openvinotoolkit.org/data/test_data/images/car_1.bmp && umask 000 && python3 samples/python/hello_classification/hello_classification.py public/googlenet-v1/FP16/googlenet-v1.xml car_1.bmp HDDL" ``` -## Troubleshooting - -If you got proxy issues, please setup proxy settings for Docker. See the Proxy section in the [Install the DL Workbench from Docker Hub* ](@ref workbench_docs_Workbench_DG_Run_Locally) topic. - ## Additional Resources - [DockerHub CI Framework](https://github.com/openvinotoolkit/docker_ci) for Intel® Distribution of OpenVINO™ toolkit. The Framework can generate a Dockerfile, build, test, and deploy an image with the Intel® Distribution of OpenVINO™ toolkit. You can reuse available Dockerfiles, add your layer and customize the image of OpenVINO™ for your needs. 
- - Intel® Distribution of OpenVINO™ toolkit home page: [https://software.intel.com/en-us/openvino-toolkit](https://software.intel.com/en-us/openvino-toolkit) - - Intel® Neural Compute Stick 2 Get Started: [https://software.intel.com/en-us/neural-compute-stick/get-started](https://software.intel.com/en-us/neural-compute-stick/get-started) diff --git a/docs/install_guides/installing-openvino-docker-windows.md b/docs/install_guides/installing-openvino-docker-windows.md index de667d1eeb7..b4bab683847 100644 --- a/docs/install_guides/installing-openvino-docker-windows.md +++ b/docs/install_guides/installing-openvino-docker-windows.md @@ -1,112 +1,118 @@ -# Install Intel® Distribution of OpenVINO™ toolkit for Windows* from Docker* Image {#openvino_docs_install_guides_installing_openvino_docker_windows} +# Install Intel® Distribution of OpenVINO™ toolkit for Windows from Docker Image {#openvino_docs_install_guides_installing_openvino_docker_windows} -The Intel® Distribution of OpenVINO™ toolkit quickly deploys applications and solutions that emulate human vision. Based on Convolutional Neural Networks (CNN), the toolkit extends computer vision (CV) workloads across Intel® hardware, maximizing performance. The Intel® Distribution of OpenVINO™ toolkit includes the Intel® Deep Learning Deployment Toolkit. +This guide provides steps for creating a Docker image with Intel® Distribution of OpenVINO™ toolkit for Windows and using the Docker image on different devices. -This guide provides device specifics for a Docker* image creation with Intel® Distribution of OpenVINO™ toolkit for Windows* and its further usage. +## System Requirements -## System Requirements +@sphinxdirective +.. 
tab:: Target Operating System with Python Version -**Target Operating Systems** + +------------------------------------+--------------------------+ + | Operating System | Supported Python Version | + +====================================+==========================+ + | Windows Server Core base LTSC 2019 | 3.8 | + +------------------------------------+--------------------------+ + | Windows 10, version 20H2 | 3.8 | + +------------------------------------+--------------------------+ -- Windows Server Core* +.. tab:: Host Operating Systems -**Host Operating Systems** + * Windows 10, 64-bit Pro, Enterprise or Education (1607 Anniversary Update, Build 14393 or later) editions + * Windows Server 2016 or higher -- Windows 10*, 64-bit Pro, Enterprise or Education (1607 Anniversary Update, Build 14393 or later) editions -- Windows Server* 2016 or higher +@endsphinxdirective -## Prebuilt Images +### Additional Requirements for GPU -Prebuilt images are available on [Docker Hub](https://hub.docker.com/u/openvino). - -## Build a Docker* Image - -You can use [available Dockerfiles](https://github.com/openvinotoolkit/docker_ci/tree/master/dockerfiles) or generate a Dockerfile with your setting via [DockerHub CI Framework](https://github.com/openvinotoolkit/docker_ci). -The Framework can generate a Dockerfile, build, test, and deploy an image with the Intel® Distribution of OpenVINO™ toolkit. You can find device specific steps to configure an Intel® Distribution of OpenVINO™ toolkit Dockerfile below. 
- -## Configure and Run the Docker* Image for CPU - -## Install Additional Dependencies - -### Install CMake - -To add CMake to the image, add the following commands to the Dockerfile: - -```bat -RUN powershell.exe -Command ` - Invoke-WebRequest -URI https://cmake.org/files/v3.14/cmake-3.14.7-win64-x64.msi -OutFile %TMP%\\cmake-3.14.7-win64-x64.msi ; ` - Start-Process %TMP%\\cmake-3.14.7-win64-x64.msi -ArgumentList '/quiet /norestart' -Wait ; ` - Remove-Item %TMP%\\cmake-3.14.7-win64-x64.msi -Force - -RUN SETX /M PATH "C:\Program Files\CMake\Bin;%PATH%" -``` - -In case of proxy issues, please add the `ARG HTTPS_PROXY` and `-Proxy %%HTTPS_PROXY%` settings to the `powershell.exe` command to the Dockerfile. Then build a Docker image: - -```bat -docker build . -t ` ---build-arg HTTPS_PROXY= -``` - -### Install Microsoft Visual Studio* Build Tools - -You can add Microsoft Visual Studio Build Tools* to a Windows* OS Docker image using the [offline](https://docs.microsoft.com/en-us/visualstudio/install/create-an-offline-installation-of-visual-studio?view=vs-2019) or [online](https://docs.microsoft.com/en-us/visualstudio/install/build-tools-container?view=vs-2019) installers for Build Tools. -Microsoft Visual Studio Build Tools* are licensed as a supplement your existing Microsoft Visual Studio* license. -Any images built with these tools should be for your personal use or for use in your organization in accordance with your existing Visual Studio* and Windows* licenses. 
- -To add MSBuild 2019 to the image, add the following commands to the Dockerfile: - -```bat -RUN powershell.exe -Command Invoke-WebRequest -URI https://aka.ms/vs/16/release/vs_buildtools.exe -OutFile %TMP%\\vs_buildtools.exe - -RUN %TMP%\\vs_buildtools.exe --quiet --norestart --wait --nocache ` - --installPath "C:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools" ` - --add Microsoft.VisualStudio.Workload.MSBuildTools ` - --add Microsoft.VisualStudio.Workload.UniversalBuildTools ` - --add Microsoft.VisualStudio.Workload.VCTools --includeRecommended ` - --remove Microsoft.VisualStudio.Component.Windows10SDK.10240 ` - --remove Microsoft.VisualStudio.Component.Windows10SDK.10586 ` - --remove Microsoft.VisualStudio.Component.Windows10SDK.14393 ` - --remove Microsoft.VisualStudio.Component.Windows81SDK || IF "%ERRORLEVEL%"=="3010" EXIT 0 && powershell set-executionpolicy remotesigned -``` - -In case of proxy issues, please use the [offline installer for Build Tools](https://docs.microsoft.com/en-us/visualstudio/install/create-an-offline-installation-of-visual-studio?view=vs-2019). 
- -## Run the Docker* Image for CPU - -To start the interactive session, run the following command allows inference on the CPU: - -```bat -docker run -it --rm -``` - -If you want to try some demos then run image with the root privileges (some additional 3-rd party dependencies will be installed): - -```bat -docker run -itu ContainerAdministrator --rm cmd /S /C "cd samples\scripts && run_sample_squeezenet.bat -d CPU" -``` - -## Configure and Run the Docker* Image for GPU - -GPU Acceleration in Windows containers feature requires to meet Windows host, OpenVINO toolkit and Docker* requirements: +To use GPU Acceleration in Windows containers, make sure that the following requirements for Windows host, OpenVINO and Docker are met: - [Windows requirements](https://docs.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/gpu-acceleration): - The container host must be running Windows Server 2019 or Windows 10 of version 1809 or higher. - The container base image must be `mcr.microsoft.com/windows:1809` or higher. Windows Server Core and Nano Server container images are not currently supported. - The container host must be running Docker Engine 19.03 or higher. - The container host must have GPU running display drivers of version WDDM 2.5 or higher. -- [OpenVINO™ GPU requirement](https://docs.openvino.ai/latest/openvino_docs_install_guides_installing_openvino_windows.html#Install-GPU): - - Intel Graphics Driver for Windows of version 15.65 or higher. -- [Docker isolation mode requirement](https://docs.microsoft.com/en-us/virtualization/windowscontainers/manage-containers/hyperv-container): +- GPU requirement for OpenVINO: Intel Graphics Driver for Windows of version 15.65 or higher. +- [Docker isolation mode requirements](https://docs.microsoft.com/en-us/virtualization/windowscontainers/manage-containers/hyperv-container): - Windows host and container version tags must match. 
- - [Windows host and container isolation process support](https://docs.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/version-compatibility) + - [Windows host and container isolation process support](https://docs.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/version-compatibility). -## Build a Docker* Image for Your Host System +## Installation Flow + +There are two ways to install OpenVINO with Docker. You can choose either of them according to your needs: +* Use a prebuilt image. Do the following steps: + 1. Get a prebuilt image from provided sources. + 2. Run the image on different devices. +* If you want to customize your image, you can also build a Docker image manually by using the following steps: + 1. Prepare a Dockerfile. + 2. Configure the Docker image. + 3. Run the image on different devices. + +## Getting a Prebuilt Image from Provided Sources + +You can find prebuilt images on: + +- [Docker Hub](https://hub.docker.com/u/openvino) +- [Azure Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/intel_corporation.openvino) + +## Preparing a Dockerfile + +You can use the [available Dockerfiles on GitHub](https://github.com/openvinotoolkit/docker_ci/tree/master/dockerfiles) or generate a Dockerfile with your settings via [DockerHub CI Framework](https://github.com/openvinotoolkit/docker_ci) which can generate a Dockerfile, build, test and deploy an image with the Intel® Distribution of OpenVINO™ toolkit. 
+ +## Configuring the Docker Image for Different Devices + +### Installing Additional Dependencies for CPU + +#### Installing CMake + + To add CMake to the image, add the following commands to the Dockerfile: + ```bat + RUN powershell.exe -Command ` + Invoke-WebRequest -URI https://cmake.org/files/v3.14/cmake-3.14.7-win64-x64.msi -OutFile %TMP%\\cmake-3.14.7-win64-x64.msi ; ` + Start-Process %TMP%\\cmake-3.14.7-win64-x64.msi -ArgumentList '/quiet /norestart' -Wait ; ` + Remove-Item %TMP%\\cmake-3.14.7-win64-x64.msi -Force + + RUN SETX /M PATH "C:\Program Files\CMake\Bin;%PATH%" + ``` + + In case of proxy issues, please add the `ARG HTTPS_PROXY` and `-Proxy %%HTTPS_PROXY%%` settings to the `powershell.exe` command in the Dockerfile. Then build a Docker image: + ```bat + docker build . -t ` + --build-arg HTTPS_PROXY= + ``` + +#### Installing Microsoft Visual Studio Build Tools + + You can add Microsoft Visual Studio Build Tools to a Windows OS Docker image using the [offline](https://docs.microsoft.com/en-us/visualstudio/install/create-an-offline-installation-of-visual-studio?view=vs-2019) or [online](https://docs.microsoft.com/en-us/visualstudio/install/build-tools-container?view=vs-2019) installers for Build Tools. + + Microsoft Visual Studio Build Tools are licensed as a supplement to your existing Microsoft Visual Studio license. + + Any images built with these tools should be for your personal use or for use in your organization in accordance with your existing Visual Studio and Windows licenses. 
+ + To add MSBuild 2019 to the image, add the following commands to the Dockerfile: + ```bat + RUN powershell.exe -Command Invoke-WebRequest -URI https://aka.ms/vs/16/release/vs_buildtools.exe -OutFile %TMP%\\vs_buildtools.exe + + RUN %TMP%\\vs_buildtools.exe --quiet --norestart --wait --nocache ` + --installPath "C:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools" ` + --add Microsoft.VisualStudio.Workload.MSBuildTools ` + --add Microsoft.VisualStudio.Workload.UniversalBuildTools ` + --add Microsoft.VisualStudio.Workload.VCTools --includeRecommended ` + --remove Microsoft.VisualStudio.Component.Windows10SDK.10240 ` + --remove Microsoft.VisualStudio.Component.Windows10SDK.10586 ` + --remove Microsoft.VisualStudio.Component.Windows10SDK.14393 ` + --remove Microsoft.VisualStudio.Component.Windows81SDK || IF "%ERRORLEVEL%"=="3010" EXIT 0 && powershell set-executionpolicy remotesigned + ``` + + In case of proxy issues, please use the [offline installer for Build Tools](https://docs.microsoft.com/en-us/visualstudio/install/create-an-offline-installation-of-visual-studio?view=vs-2019). + +### Configuring the Image for GPU + +> **NOTE**: Since GPU is not supported in prebuilt images or [default Dockerfiles](https://github.com/openvinotoolkit/docker_ci/tree/master/dockerfiles), you must make sure the Additional Requirements for GPU in System Requirements are met, and do the following steps to build the image manually. 1. Reuse one of [available Dockerfiles](https://github.com/openvinotoolkit/docker_ci/tree/master/dockerfiles). You can also use your own Dockerfile. 2. Check your [Windows host and container isolation process compatibility](https://docs.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/version-compatibility). -3. Find the appropriate Windows container base image on [DockerHub*](https://hub.docker.com/_/microsoft-windows) and set up your host/container version in the `FROM` Dockerfile instruction. 
- For example, in [openvino_c_dev_2021.dockerfile](https://github.com/openvinotoolkit/docker_ci/blob/master/dockerfiles/winserver2019/openvino_c_dev_2021.dockerfile), change: +3. Find the appropriate Windows container base image on [DockerHub](https://hub.docker.com/_/microsoft-windows) and set up your host/container version in the `FROM` Dockerfile instruction. + For example, in the `openvino_c_dev_.dockerfile`, change: ```bat FROM mcr.microsoft.com/windows/servercore:ltsc2019 AS ov_base ``` @@ -114,7 +120,7 @@ GPU Acceleration in Windows containers feature requires to meet Windows host, Op ```bat FROM mcr.microsoft.com/windows:20H2 ``` -4. Build the Docker image +4. Build the Docker image by running the following command: ```bat docker build --build-arg package_url= -f -t . ``` @@ -124,7 +130,25 @@ GPU Acceleration in Windows containers feature requires to meet Windows host, Op copy C:\Windows\System32\OpenCL.dll C:\tmp ``` -## Run the Docker* Image for GPU +## Running the Docker Image on Different Devices + +### Running the Image on CPU + +To start the interactive session, run the following command: +```bat +docker run -it --rm +``` + +If you want to try some samples, run the image with the following command: +```bat +docker run -it --rm +cmd /S /C "omz_downloader --name googlenet-v1 --precisions FP16 && omz_converter --name googlenet-v1 --precision FP16 && curl -kO https://storage.openvinotoolkit.org/data/test_data/images/car_1.bmp && python samples\python\hello_classification\hello_classification.py public\googlenet-v1\FP16\googlenet-v1.xml car_1.bmp CPU" +``` + +### Running the Image on GPU + +> **NOTE**: Since GPU is not supported in prebuilt images or [default Dockerfiles](https://github.com/openvinotoolkit/docker_ci/tree/master/dockerfiles), you must make sure the Additional Requirements for GPU in System Requirements are met, and configure and build the image manually before you can run inferences on a GPU. + 1. 
To try inference on a GPU, run the image with the following command: ```bat @@ -138,18 +162,13 @@ GPU Acceleration in Windows containers feature requires to meet Windows host, Op ```bat copy C:\tmp\OpenCL.dll C:\Windows\System32\ && reg add "HKLM\SOFTWARE\Khronos\OpenCL\Vendors" /v "C:\Windows\System32\DriverStore\FileRepository\iigd_dch.inf_amd64_518f2921ba495409\ocl\bin\x64\intelocl64.dll" /t REG_DWORD /d 0 ``` -3. For example, run the `run_sample_squeezenet` demo with the command below: + For example, run the `Hello Classification Python` sample with the following command: ```bat - cd samples\scripts && run_sample_squeezenet.bat -d GPU + omz_downloader --name googlenet-v1 --precisions FP16 && omz_converter --name googlenet-v1 --precision FP16 && curl -kO https://storage.openvinotoolkit.org/data/test_data/images/car_1.bmp && python samples\python\hello_classification\hello_classification.py public\googlenet-v1\FP16\googlenet-v1.xml car_1.bmp GPU ``` - > **NOTE**: Addittional third-party dependencies will be installed. -## Troubleshooting - -If you got proxy issues, please setup proxy settings for Docker. See the Proxy section in the [Install the DL Workbench from Docker Hub* ](@ref workbench_docs_Workbench_DG_Run_Locally) topic. ## Additional Resources - [DockerHub CI Framework](https://github.com/openvinotoolkit/docker_ci) for Intel® Distribution of OpenVINO™ toolkit. The Framework can generate a Dockerfile, build, test, and deploy an image with the Intel® Distribution of OpenVINO™ toolkit. You can reuse available Dockerfiles, add your layer and customize the image of OpenVINO™ for your needs. 
- - Intel® Distribution of OpenVINO™ toolkit home page: [https://software.intel.com/en-us/openvino-toolkit](https://software.intel.com/en-us/openvino-toolkit) diff --git a/docs/install_guides/installing-openvino-images.md b/docs/install_guides/installing-openvino-images.md index 82cb0f4cdd5..101e247529b 100644 --- a/docs/install_guides/installing-openvino-images.md +++ b/docs/install_guides/installing-openvino-images.md @@ -10,5 +10,5 @@ You may install Intel® Distribution of OpenVINO™ toolkit from images and repo * [Yocto](installing-openvino-yocto.md) * [PyPI](installing-openvino-pip.md) -The open source version is available in the [OpenVINO™ toolkit GitHub repository](https://github.com/openvinotoolkit/openvino) and you can build it for supported platforms using the Inference Engine Build Instructions. +The open source version is available in the [OpenVINO™ toolkit GitHub repository](https://github.com/openvinotoolkit/openvino) and you can build it for supported platforms using the OpenVINO Build Instructions. diff --git a/docs/install_guides/installing-openvino-linux.md b/docs/install_guides/installing-openvino-linux.md index fb8a7e3bf04..77c4462c93c 100644 --- a/docs/install_guides/installing-openvino-linux.md +++ b/docs/install_guides/installing-openvino-linux.md @@ -1,4 +1,4 @@ -# Install and Configure Intel® Distribution of OpenVINO™ toolkit for Linux {#openvino_docs_install_guides_installing_openvino_linux} +# Install and Configure Intel® Distribution of OpenVINO™ Toolkit for Linux {#openvino_docs_install_guides_installing_openvino_linux} > **NOTE**: Since the OpenVINO™ 2022.1 release, the following development tools: Model Optimizer, Post-Training Optimization Tool, Model Downloader and other Open Model Zoo tools, Accuracy Checker, and Annotation Converter are not part of the installer. These tools are now only available on [pypi.org](https://pypi.org/project/openvino-dev/). @@ -7,8 +7,8 @@ @sphinxdirective .. 
tab:: Operating Systems - * Ubuntu 18.04.x long-term support (LTS), 64-bit - * Ubuntu 20.04.x long-term support (LTS), 64-bit + * Ubuntu 18.04 long-term support (LTS), 64-bit + * Ubuntu 20.04 long-term support (LTS), 64-bit .. note:: Since the OpenVINO™ 2022.1 release, CentOS 7.6, 64-bit is not longer supported. @@ -49,7 +49,7 @@ This guide provides step-by-step instructions on how to install the Intel® Dist @sphinxdirective .. important:: - Before you start your journey with installation of the Intel® Distribution of OpenVINO™, we encourage you to check up our :ref:`code samples ` in C, C++, and Python and :ref:`notebook tutorials ` that we prepared for you, so you could see all amazing things that you can achieve with our tool. + Before you start your journey with installation of the Intel® Distribution of OpenVINO™, we encourage you to check our :ref:`code samples ` in C, C++, Python and :ref:`notebook tutorials ` that we prepared for you, so you could see all the amazing things that you can achieve with our tool. @endsphinxdirective @@ -85,15 +85,9 @@ This guide provides step-by-step instructions on how to install the Intel® Dist ```sh ./l_openvino_toolkit_p_.sh -a --cli ``` - - @sphinxdirective + > **NOTE**: To get additional information on all parameters that can be used, use the help option: `--help`. Among others, you can find there `-s` option which offers silent mode, which together with `--eula approve` allows you to run the whole installation with default values without any user interaction. - .. note:: - To get additional information on all parameters that can be used, check up the help option: `--help`. Among others, you can find there `-s` option which offers silent mode, which together with `--eula approve` allows you to run whole installation with default values without any user inference. - - @endsphinxdirective - -6. Follow the instructions on your screen. During the installation you will be asked to accept the license agreement. 
The acceptance is required to continue. Check out the installation process on the image below:
+6. Follow the instructions on your screen. During the installation you will be asked to accept the license agreement. Your acceptance is required to continue. Check the installation process on the image below:
![](../img/openvino-install-linux-run-boostrapper-script.gif) Click on the image to see the details. @@ -110,7 +104,7 @@ The core components are now installed. Continue to the next section to install a ## Step 2: Install External Software Dependencies -This script allows to install Linux platform development tools and components to work with the product. +This script enables you to install Linux platform development tools and components to work with the product. 1. Go to the `install_dependencies` directory: ```sh @@ -137,7 +131,7 @@ If you have more than one OpenVINO™ version on your machine, you can easily sw The environment variables are set. Next, you can download some additional tools. -## Step 4 (Optional): Download additional components +## Step 4 (Optional): Download Additional Components > **NOTE**: Since the OpenVINO™ 2022.1 release, the following development tools: Model Optimizer, Post-Training Optimization Tool, Model Downloader and other Open Model Zoo tools, Accuracy Checker, and Annotation Converter are not part of the installer. The OpenVINO™ Development Tools can only be installed via PyPI now. See [Install OpenVINO™ Development Tools](installing-model-dev-tools.md) for detailed steps. @@ -145,7 +139,7 @@ The environment variables are set. Next, you can download some additional tools. .. dropdown:: OpenCV - OpenCV is necessary to run demos from Open Model Zoo (OMZ). Some OpenVINO samples and demos also use OpenCV as a dependency. The Intel® Distribution of OpenVINO™ provides a script to install OpenCV: ``/extras/scripts/download_opencv.sh``. + OpenCV is necessary to run demos from Open Model Zoo (OMZ). Some OpenVINO samples can also extend their capabilities when compiled with OpenCV as a dependency. The Intel® Distribution of OpenVINO™ provides a script to install OpenCV: ``/extras/scripts/download_opencv.sh``. .. note:: Make sure you have 2 prerequisites installed: ``curl`` and ``tar``. @@ -154,34 +148,38 @@ The environment variables are set. 
Next, you can download some additional tools. @endsphinxdirective -## Step 5 (Optional): Configure Inference on non-CPU Devices +## Step 5 (Optional): Configure Inference on Non-CPU Devices @sphinxdirective +.. tab:: GNA + + To enable the toolkit components to use Intel® Gaussian & Neural Accelerator (GNA) on your system, follow the steps in :ref:`GNA Setup Guide `. + .. tab:: GPU - Only if you want to enable the toolkit components to use processor graphics (GPU) on your system, follow the steps in :ref:`GPU Setup Guide `. + To enable the toolkit components to use processor graphics (GPU) on your system, follow the steps in :ref:`GPU Setup Guide `. .. tab:: NCS 2 - Only if you want to perform inference on Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X VPU, follow the steps on :ref:`NCS2 Setup Guide `. - For more details, see the `Get Started page for Intel® Neural Compute Stick 2 `_. + To perform inference on Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X VPU, follow the steps on :ref:`NCS2 Setup Guide `. + .. tab:: VPU - To install and configure your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, see the :ref:`VPUs Configuration Guide `. - After configuration is done, you are ready to run the verification scripts with the HDDL Plugin for your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. Check up our :ref:`Movidius VPU demos `. + To install and configure your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, see the :ref:`VPU Configuration Guide `. + After configuration is done, you are ready to run the verification scripts with the HDDL Plugin for your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. .. warning:: While working with either HDDL or NCS, choose one of them as they cannot run simultaneously on the same machine. @endsphinxdirective -## Step 6: What's next? +## Step 6: What's Next? Now you are ready to try out the toolkit. 
Developing in Python: - * [Start with tensorflow models with OpenVINO™](https://docs.openvino.ai/latest/notebooks/101-tensorflow-to-openvino-with-output.html) + * [Start with TensorFlow models with OpenVINO™](https://docs.openvino.ai/latest/notebooks/101-tensorflow-to-openvino-with-output.html) * [Start with ONNX and PyTorch models with OpenVINO™](https://docs.openvino.ai/latest/notebooks/102-pytorch-onnx-to-openvino-with-output.html) * [Start with PaddlePaddle models with OpenVINO™](https://docs.openvino.ai/latest/notebooks/103-paddle-onnx-to-openvino-classification-with-output.html) @@ -190,7 +188,7 @@ Developing in C++: * [Hello Classification C++ Sample](@ref openvino_inference_engine_samples_hello_classification_README) * [Hello Reshape SSD C++ Sample](@ref openvino_inference_engine_samples_hello_reshape_ssd_README) -## Uninstall the Intel® Distribution of OpenVINO™ Toolkit +## Uninstalling the Intel® Distribution of OpenVINO™ Toolkit To uninstall the toolkit, follow the steps on the [Uninstalling page](uninstalling-openvino.md). @@ -220,20 +218,20 @@ To uninstall the toolkit, follow the steps on the [Uninstalling page](uninstalli .. 
dropdown:: Additional Resources - * Convert models for use with OpenVINO™: :ref:`Model Optimizer Developer Guide ` - * Write your own OpenVINO™ applications: :ref:`OpenVINO™ Runtime User Guide ` - * Information on sample applications: :ref:`OpenVINO™ Toolkit Samples Overview ` - * Information on a supplied set of models: :ref:`Overview of OpenVINO™ Toolkit Pre-Trained Models ` + * Converting models for use with OpenVINO™: :ref:`Model Optimizer Developer Guide ` + * Writing your own OpenVINO™ applications: :ref:`OpenVINO™ Runtime User Guide ` + * Sample applications: :ref:`OpenVINO™ Toolkit Samples Overview ` + * Pre-trained deep learning models: :ref:`Overview of OpenVINO™ Toolkit Pre-Trained Models ` * IoT libraries and code samples in the GitHUB repository: `Intel® IoT Developer Kit`_ - - To learn more about converting models from specific frameworks, go to: - + + .. _Intel® IoT Developer Kit: https://github.com/intel-iot-devkit @endsphinxdirective diff --git a/docs/install_guides/installing-openvino-macos.md b/docs/install_guides/installing-openvino-macos.md index 618a8f3727e..383e56524b3 100644 --- a/docs/install_guides/installing-openvino-macos.md +++ b/docs/install_guides/installing-openvino-macos.md @@ -26,7 +26,7 @@ .. tab:: Software Requirements * `CMake 3.13 or higher `_ (choose "macOS 10.13 or later"). Add `/Applications/CMake.app/Contents/bin` to path (for default install). - * `Python 3.6 - 3.9 `_ (choose 3.6.x - 3.9.x). Install and add to path. + * `Python 3.6 - 3.9 `_ (choose 3.6 - 3.9). Install and add to path. * Apple Xcode Command Line Tools. In the terminal, run `xcode-select --install` from any directory * (Optional) Apple Xcode IDE (not required for OpenVINO™, but useful for development) @@ -60,7 +60,7 @@ This guide provides step-by-step instructions on how to install the Intel® Dist @endsphinxdirective -5. Follow the instructions on your screen. During the installation you will be asked to accept the license agreement. 
The acceptance is required to continue. +5. Follow the instructions on your screen. During the installation you will be asked to accept the license agreement. Your acceptance is required to continue. ![](../img/openvino-install-macos-run-boostrapper-script.gif) Click on the image to see the details. @@ -96,7 +96,7 @@ The environment variables are set. Continue to the next section if you want to d .. dropdown:: OpenCV - OpenCV is necessary to run demos from Open Model Zoo (OMZ). Some OpenVINO samples and demos also use OpenCV as a dependency. The Intel® Distribution of OpenVINO™ provides a script to install OpenCV: ``/extras/scripts/download_opencv.sh``. + OpenCV is necessary to run demos from Open Model Zoo (OMZ). Some OpenVINO samples can also extend their capabilities when compiled with OpenCV as a dependency. The Intel® Distribution of OpenVINO™ provides a script to install OpenCV: ``/extras/scripts/download_opencv.sh``. .. note:: Make sure you have 2 prerequisites installed: ``curl`` and ``tar``. @@ -142,20 +142,20 @@ To uninstall the toolkit, follow the steps on the [Uninstalling page](uninstalli .. 
dropdown:: Additional Resources - * Convert models for use with OpenVINO™: :ref:`Model Optimizer Developer Guide ` - * Write your own OpenVINO™ applications: :ref:`OpenVINO™ Runtime User Guide ` - * Information on sample applications: :ref:`OpenVINO™ Toolkit Samples Overview ` - * Information on a supplied set of models: :ref:`Overview of OpenVINO™ Toolkit Pre-Trained Models ` + * Converting models for use with OpenVINO™: :ref:`Model Optimizer Developer Guide ` + * Writing your own OpenVINO™ applications: :ref:`OpenVINO™ Runtime User Guide ` + * Sample applications: :ref:`OpenVINO™ Toolkit Samples Overview ` + * Pre-trained deep learning models: :ref:`Overview of OpenVINO™ Toolkit Pre-Trained Models ` * IoT libraries and code samples in the GitHUB repository: `Intel® IoT Developer Kit`_ - - To learn more about converting models from specific frameworks, go to: - + + .. _Intel® IoT Developer Kit: https://github.com/intel-iot-devkit @endsphinxdirective diff --git a/docs/install_guides/installing-openvino-overview.md b/docs/install_guides/installing-openvino-overview.md index b3243efaa31..6edfaf528df 100644 --- a/docs/install_guides/installing-openvino-overview.md +++ b/docs/install_guides/installing-openvino-overview.md @@ -8,22 +8,24 @@ Intel® Distribution of OpenVINO™ toolkit is a comprehensive toolkit for quick ## Installation Options +From the 2022.1 release, the OpenVINO installation package has been separated into two parts: OpenVINO Runtime and OpenVINO Development Tools. See the following instructions to decide your installation process. + ### Decide What to Install -**If you have already finished your model development and want to deploy your applications on various devices, install OpenVINO Runtime**, which contains a set of libraries for an easy inference integration into your applications and supports heterogeneous execution across Intel® CPU and Intel® GPU hardware. 
+**If you have already finished your model development and want to deploy your applications on various devices, [install OpenVINO Runtime](installing-openvino-runtime.md)**, which contains a set of libraries for an easy inference integration into your applications and supports heterogeneous execution across Intel® CPU and Intel® GPU hardware. -**If you want to download, convert, optimize and tune pre-trained deep learning models**, [install OpenVINO Development Tools](installing-model-dev-tools.md), which provides the following tools: +**If you want to download models from [Open Model Zoo](../model_zoo.md), convert to [OpenVINO IR](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md), [optimize](../optimization_guide/model_optimization_guide.md) and tune pre-trained deep learning models**, [install OpenVINO Development Tools](installing-model-dev-tools.md), which provides the following tools: * Model Optimizer + * Post-Training Optimization Tool * Benchmark Tool * Accuracy Checker and Annotation Converter - * Post-Training Optimization Tool * Model Downloader and other Open Model Zoo tools ### Choose Your Installation Method -For Python developers, you can [install OpenVINO from PyPI](installing-openvino-pip.md), which contains both OpenVINO Runtime and Development Tools and less steps. +For Python developers, you can [install OpenVINO from PyPI](installing-openvino-pip.md), which contains both OpenVINO Runtime and Development Tools and fewer steps. For C++ developers, you may choose one of the following installation options to install OpenVINO Runtime on your specific operating system: @@ -32,6 +34,6 @@ For C++ developers, you may choose one of the following installation options to * macOS: You can install OpenVINO Runtime using an [Installer](installing-openvino-macos.md) or [Anaconda Cloud](installing-openvino-conda.md). * [Raspbian OS](installing-openvino-raspbian.md). 
-> **NOTE**: From the 2022.1 release, the OpenVINO Development Tools can **only** be installed via PyPI. See [Install OpenVINO Development Tools](installing-model-dev-tools.md) for detailed steps. +> **NOTE**: From the 2022.1 release, OpenVINO Development Tools can **only** be installed via PyPI. See [Install OpenVINO Development Tools](installing-model-dev-tools.md) for detailed steps. -Besides, the open source version is also available in the [OpenVINO™ toolkit GitHub repository](https://github.com/openvinotoolkit/openvino/wiki/BuildingCode). You can build it for supported platforms using the Inference Engine Build Instructions. +Besides, the open source version is also available in the [OpenVINO™ toolkit GitHub repository](https://github.com/openvinotoolkit/openvino/). You can build it for supported platforms using the [OpenVINO Build Instructions](https://github.com/openvinotoolkit/openvino/wiki/BuildingCode). diff --git a/docs/install_guides/installing-openvino-pip.md b/docs/install_guides/installing-openvino-pip.md index 1db00af2ee2..ec1634d9a56 100644 --- a/docs/install_guides/installing-openvino-pip.md +++ b/docs/install_guides/installing-openvino-pip.md @@ -1,41 +1,97 @@ # Install Intel® Distribution of OpenVINO™ Toolkit from PyPI Repository {#openvino_docs_install_guides_installing_openvino_pip} -You can install Intel® Distribution of OpenVINO™ toolkit through the PyPI repository, including both OpenVINO™ Runtime and OpenVINO™ Development Tools. Besides, from the 2022.1 release, OpenVINO Development Tools can only be installed via PyPI. +You can install both OpenVINO™ Runtime and OpenVINO Development Tools through the PyPI repository. This page provides the main steps for installing OpenVINO Runtime. +> **NOTE**: From the 2022.1 release, the OpenVINO™ Development Tools can only be installed via PyPI. See [Install OpenVINO Development Tools](installing-model-dev-tools.md) for detailed steps. 
## Installing OpenVINO Runtime -The OpenVINO Runtime contains a set of libraries for an easy inference integration into your applications and supports heterogeneous execution across Intel® CPU and Intel® GPU hardware. To install OpenVINO Runtime, use the following command: +For system requirements and troubleshooting, see . + +### Step 1. Set Up Python Virtual Environment + +To avoid dependency conflicts, use a virtual environment. Skip this step only if you do want to install all dependencies globally. + +Use the following command to create a virtual environment: + +@sphinxdirective + +.. tab:: Linux and macOS + + .. code-block:: sh + + python3 -m venv openvino_env + +.. tab:: Windows + + .. code-block:: sh + + python -m venv openvino_env + + +@endsphinxdirective + +### Step 2. Activate Virtual Environment + +@sphinxdirective + +.. tab:: On Linux and macOS + + .. code-block:: sh + + source openvino_env/bin/activate + +.. tab:: On Windows + + .. code-block:: sh + + openvino_env\Scripts\activate + + +@endsphinxdirective + +### Step 3. Set Up and Update PIP to the Highest Version + +Use the following command: +```sh +python -m pip install --upgrade pip +``` + +### Step 4. Install the Package + +Use the following command: ``` pip install openvino ``` -For system requirements and more detailed steps, see . +### Step 5. Verify that the Package Is Installed +Run the command below: +```sh +python -c "from openvino.runtime import Core" +``` + +If installation was successful, you will not see any error messages (no console output). ## Installing OpenVINO Development Tools -OpenVINO Development Tools include Model Optimizer, Benchmark Tool, Accuracy Checker, Post-Training Optimization Tool and Open Model Zoo tools including Model Downloader. While installing OpenVINO Development Tools, OpenVINO Runtime will also be installed as a dependency, so you don't need to install OpenVINO Runtime separately. 
+OpenVINO Development Tools include Model Optimizer, Benchmark Tool, Accuracy Checker, Post-Training Optimization Tool and Open Model Zoo tools including Model Downloader. If you want to install OpenVINO Development Tools, OpenVINO Runtime will also be installed as a dependency, so you don't need to install OpenVINO Runtime separately. -Use the following command to install OpenVINO Development Tools: -``` -pip install openvino-dev[EXTRAS] -``` -where the EXTRAS parameter specifies one or more deep learning frameworks via these values: `caffe`, `kaldi`, `mxnet`, `onnx`, `pytorch`, `tensorflow`, `tensorflow2`. Make sure that you install the corresponding frameworks for your models. +See [Install OpenVINO™ Development Tools](installing-model-dev-tools.md) for detailed steps. -For example, to install and configure the components for working with TensorFlow 2.x, MXNet and Caffe, use the following command: -``` -pip install openvino-dev[tensorflow2,mxnet,caffe] -``` -> **NOTE**: For TensorFlow, use the `tensorflow2` value as much as possible. The `tensorflow` value is provided only for compatibility reasons. - -For system requirements and more detailed steps, see . +## What's Next? + +Now you may continue with the following tasks: + +* To convert models for use with OpenVINO, see [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). +* See pre-trained deep learning models in our [Open Model Zoo](../model_zoo.md). +* Try out OpenVINO via [OpenVINO Notebooks](https://docs.openvino.ai/latest/notebooks/notebooks.html). +* To write your own OpenVINO™ applications, see [OpenVINO Runtime User Guide](../OV_Runtime_UG/openvino_intro.md). +* See sample applications in [OpenVINO™ Toolkit Samples Overview](../OV_Runtime_UG/Samples_Overview.md). 
## Additional Resources -- [Intel® Distribution of OpenVINO™ toolkit](https://software.intel.com/en-us/openvino-toolkit) -- [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) -- [OpenVINO™ Runtime User Guide](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md) -- [Inference Engine Samples Overview](../OV_Runtime_UG/Samples_Overview.md) +- Intel® Distribution of OpenVINO™ toolkit home page: +- For IoT Libraries & Code Samples, see [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit). diff --git a/docs/install_guides/installing-openvino-windows.md b/docs/install_guides/installing-openvino-windows.md index b65fa3785e6..0aa6244de66 100644 --- a/docs/install_guides/installing-openvino-windows.md +++ b/docs/install_guides/installing-openvino-windows.md @@ -71,7 +71,7 @@ This guide provides step-by-step instructions on how to install the Intel® Dist @endsphinxdirective -3. Follow the instructions on your screen. During the installation you will be asked to accept the license agreement. The acceptance is required to continue. Check out the installation process in the image below:
+3. Follow the instructions on your screen. During the installation you will be asked to accept the license agreement. Your acceptance is required to continue. Check out the installation process in the image below:
![](../img/openvino-install-win-run-boostrapper-script.gif) Click on the image to see the details.
@@ -107,23 +107,28 @@ The environment variables are set. Next, you can download some additional tools. .. dropdown:: OpenCV - OpenCV is necessary to run demos from Open Model Zoo (OMZ). Some OpenVINO samples and demos also use OpenCV as a dependency. The Intel® Distribution of OpenVINO™ provides a script to install OpenCV: ``/extras/scripts/download_opencv.sh``. + OpenCV is necessary to run demos from Open Model Zoo (OMZ). Some OpenVINO samples can also extend their capabilities when compiled with OpenCV as a dependency. The Intel® Distribution of OpenVINO™ provides a script to install OpenCV: ``/extras/scripts/download_opencv.sh``. .. note:: No prerequisites are needed. - There are 3 ways to run the script: + There are three ways to run the script: + + * GUI: right-click the script and select ``Run with PowerShell``. + + * Command prompt (CMD) console: - a. GUI: right click and select ``Run with PowerShell`` - b. Command prompt (CMD) console: .. code-block:: sh - + powershell \extras\scripts\download_opencv.ps1 - - c. PowerShell console: + + + * PowerShell console: + .. code-block:: sh - + .\\scripts\download_opencv.ps1 + If the Intel® Distribution of OpenVINO™ is installed to the system location (e.g. ``Program Files (x86)``) then privilege elevation dialog will be shown. The script can be run from CMD/PowerShell Administrator console to avoid this dialog in case of system-wide installation. The script is interactive by default, so during the execution it will wait for user to press ``Enter`` If you want to avoid this, use the ``-batch`` option, e.g. ``powershell \extras\scripts\download_opencv.ps1 -batch``. After the execution of the script, you will find OpenCV extracted to ``/extras/opencv``. @@ -133,13 +138,22 @@ The environment variables are set. Next, you can download some additional tools. ## Step 4 (Optional): Configure Inference on non-CPU Devices @sphinxdirective +.. 
tab:: GNA + + To enable the toolkit components to use Intel® Gaussian & Neural Accelerator (GNA) on your system, follow the steps in :ref:`GNA Setup Guide `. + .. tab:: GPU - Only do this if you want to enable the toolkit components to use processor graphics (GPU) on your system, follow the steps in :ref:`GPU Setup Guide `. + To enable the toolkit components to use processor graphics (GPU) on your system, follow the steps in :ref:`GPU Setup Guide `. .. tab:: VPU - To install and configure your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, see the :ref:`VPUs Configuration Guide `. + To install and configure your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, see the :ref:`VPU Configuration Guide `. + +.. tab:: NCS 2 + + No additional configurations are needed. + @endsphinxdirective @@ -157,7 +171,7 @@ Developing in C++: * [Hello Classification C++ Sample](@ref openvino_inference_engine_samples_hello_classification_README) * [Hello Reshape SSD C++ Sample](@ref openvino_inference_engine_samples_hello_reshape_ssd_README) -## Uninstall the Intel® Distribution of OpenVINO™ Toolkit +## Uninstalling the Intel® Distribution of OpenVINO™ Toolkit To uninstall the toolkit, follow the steps on the [Uninstalling page](uninstalling-openvino.md). @@ -165,20 +179,20 @@ To uninstall the toolkit, follow the steps on the [Uninstalling page](uninstalli .. 
dropdown:: Additional Resources - * Convert models for use with OpenVINO™: :ref:`Model Optimizer Developer Guide ` - * Write your own OpenVINO™ applications: :ref:`OpenVINO™ Runtime User Guide ` - * Information on sample applications: :ref:`OpenVINO™ Toolkit Samples Overview ` - * Information on a supplied set of models: :ref:`Overview of OpenVINO™ Toolkit Pre-Trained Models ` + * Converting models for use with OpenVINO™: :ref:`Model Optimizer Developer Guide ` + * Writing your own OpenVINO™ applications: :ref:`OpenVINO™ Runtime User Guide ` + * Sample applications: :ref:`OpenVINO™ Toolkit Samples Overview ` + * Pre-trained deep learning models: :ref:`Overview of OpenVINO™ Toolkit Pre-Trained Models ` * IoT libraries and code samples in the GitHUB repository: `Intel® IoT Developer Kit`_ - To learn more about converting models from specific frameworks, go to: - + .. _Intel® IoT Developer Kit: https://github.com/intel-iot-devkit @endsphinxdirective diff --git a/docs/install_guides/installing-openvino-yocto.md b/docs/install_guides/installing-openvino-yocto.md index c5340095d90..4842a34edd1 100644 --- a/docs/install_guides/installing-openvino-yocto.md +++ b/docs/install_guides/installing-openvino-yocto.md @@ -1,30 +1,28 @@ -# Create a Yocto* Image with OpenVINO™ toolkit {#openvino_docs_install_guides_installing_openvino_yocto} -This document provides instructions for creating a Yocto* image with OpenVINO™ toolkit. - -Instructions were validated and tested for [Yocto OpenVINO 2020.4 release](http://git.yoctoproject.org/cgit/cgit.cgi/meta-intel). +# Create a Yocto Image with Intel® Distribution of OpenVINO™ toolkit {#openvino_docs_install_guides_installing_openvino_yocto} +This document provides instructions for creating a Yocto image with Intel® Distribution of OpenVINO™ toolkit. 
## System Requirements -Use the [Yocto Project* official documentation](https://www.yoctoproject.org/docs/latest/mega-manual/mega-manual.html#brief-compatible-distro) to set up and configure your host machine to be compatible with BitBake*. +Use the [Yocto Project official documentation](https://docs.yoctoproject.org/brief-yoctoprojectqs/index.html#compatible-linux-distribution) to set up and configure your host machine to be compatible with BitBake. -## Setup +## Step 1: Set Up Environment -### Set up Git repositories +### Set Up Git Repositories The following Git repositories are required to build a Yocto image: -- [Poky](https://www.yoctoproject.org/docs/latest/mega-manual/mega-manual.html#poky) -- [Meta-intel](http://git.yoctoproject.org/cgit/cgit.cgi/meta-intel/tree/README) +- [Poky](https://git.yoctoproject.org/poky) +- [Meta-intel](https://git.yoctoproject.org/meta-intel/tree/README) - [Meta-openembedded](http://cgit.openembedded.org/meta-openembedded/tree/README) - Meta-clang Clone these Git repositories to your host machine: ```sh -git clone https://git.yoctoproject.org/git/poky -git clone https://git.yoctoproject.org/git/meta-intel -git clone https://git.openembedded.org/meta-openembedded -git clone https://github.com/kraj/meta-clang.git +git clone https://git.yoctoproject.org/git/poky --branch honister +git clone https://git.yoctoproject.org/git/meta-intel --branch honister +git clone https://git.openembedded.org/meta-openembedded --branch honister +git clone https://github.com/kraj/meta-clang.git --branch honister ``` -### Set up BitBake* Layers +### Set up BitBake Layers ```sh source poky/oe-init-build-env @@ -36,7 +34,7 @@ bitbake-layers add-layer ../meta-clang ### Set up BitBake Configurations -Include extra configuration in conf/local.conf in your build directory as required. +Include extra configuration in `conf/local.conf` in your build directory as required. ```sh # Build with SSE4.2, AVX2 etc. 
extensions @@ -45,44 +43,44 @@ MACHINE = "intel-skylake-64" # Enable clDNN GPU plugin when needed. # This requires meta-clang and meta-oe layers to be included in bblayers.conf # and is not enabled by default. -PACKAGECONFIG_append_pn-openvino-inference-engine = " opencl" +PACKAGECONFIG:append:pn-openvino-inference-engine = " opencl" -# Enable building inference engine python API. +# Enable building OpenVINO Python API. # This requires meta-python layer to be included in bblayers.conf. -PACKAGECONFIG_append_pn-openvino-inference-engine = " python3" +PACKAGECONFIG:append:pn-openvino-inference-engine = " python3" -# This adds inference engine related libraries in the target image. -CORE_IMAGE_EXTRA_INSTALL_append = " openvino-inference-engine" +# This adds OpenVINO related libraries in the target image. +CORE_IMAGE_EXTRA_INSTALL:append = " openvino-inference-engine" -# This adds inference engine samples in the target image. -CORE_IMAGE_EXTRA_INSTALL_append = " openvino-inference-engine-samples" +# This adds OpenVINO samples in the target image. +CORE_IMAGE_EXTRA_INSTALL:append = " openvino-inference-engine-samples" -# Include inference engine python API package in the target image. -CORE_IMAGE_EXTRA_INSTALL_append = " openvino-inference-engine-python3" +# Include OpenVINO Python API package in the target image. +CORE_IMAGE_EXTRA_INSTALL:append = " openvino-inference-engine-python3" # Enable MYRIAD plugin -CORE_IMAGE_EXTRA_INSTALL_append = " openvino-inference-engine-vpu-firmware" +CORE_IMAGE_EXTRA_INSTALL:append = " openvino-inference-engine-vpu-firmware" -# Include model optimizer in the target image. -CORE_IMAGE_EXTRA_INSTALL_append = " openvino-model-optimizer" +# Include Model Optimizer in the target image. 
+CORE_IMAGE_EXTRA_INSTALL:append = " openvino-model-optimizer" ``` -## Build a Yocto Image with OpenVINO Packages +## Step 2: Build a Yocto Image with OpenVINO Packages -Run BitBake to build the minimal image with OpenVINO packages: +Run BitBake to build your image with OpenVINO packages. To build the minimal image, for example, run: ```sh bitbake core-image-minimal ``` -## Verify the Created Yocto Image with OpenVINO Packages +## Step 3: Verify the Yocto Image with OpenVINO Packages Verify that OpenVINO packages were built successfully. -Run 'oe-pkgdata-util list-pkgs | grep openvino' command. +Run the following command: ```sh oe-pkgdata-util list-pkgs | grep openvino ``` -Verify that it returns the list of packages below: +If the image was built successfully, it will return the list of packages as below: ```sh openvino-inference-engine openvino-inference-engine-dbg diff --git a/docs/install_guides/installing-openvino-yum.md b/docs/install_guides/installing-openvino-yum.md index dc17b4792aa..4e0204a0616 100644 --- a/docs/install_guides/installing-openvino-yum.md +++ b/docs/install_guides/installing-openvino-yum.md @@ -1,144 +1,162 @@ -# Install Intel® Distribution of OpenVINO™ toolkit for Linux* Using YUM Repository {#openvino_docs_install_guides_installing_openvino_yum} +# Install Intel® Distribution of OpenVINO™ Toolkit for Linux Using YUM Repository {#openvino_docs_install_guides_installing_openvino_yum} -This guide provides installation steps for the Intel® Distribution of OpenVINO™ toolkit for Linux* distributed through the YUM repository. +This guide provides installation steps for Intel® Distribution of OpenVINO™ toolkit for Linux distributed through the YUM repository. -> **IMPORTANT**: By downloading and using this container and the included software, you agree to the terms and conditions of the [software license agreements](https://software.intel.com/content/dam/develop/external/us/en/documents/intel-openvino-license-agreements.pdf). 
Please, review the content inside the `/licensing` folder for more details. +> **NOTE**: From the 2022.1 release, the OpenVINO™ Development Tools can only be installed via PyPI. If you want to develop or optimize your models with OpenVINO, see [Install OpenVINO Development Tools](installing-model-dev-tools.md) for detailed steps. -> **NOTE**: Intel® Graphics Compute Runtime for OpenCL™ is not a part of OpenVINO™ YUM distribution. You can install it from the [Intel® Graphics Compute Runtime for OpenCL™ GitHub repo](https://github.com/intel/compute-runtime). - -> **NOTE**: Only runtime packages are available via the YUM repository. +> **IMPORTANT**: By downloading and using this container and the included software, you agree to the terms and conditions of the [software license agreements](https://software.intel.com/content/dam/develop/external/us/en/documents/intel-openvino-license-agreements.pdf). Please review the content inside the `/licensing` folder for more details. ## System Requirements -The complete list of supported hardware is available in the [Release Notes](https://software.intel.com/content/www/us/en/develop/articles/openvino-relnotes.html#inpage-nav-8). +The complete list of supported hardware is available in the [Release Notes](https://software.intel.com/content/www/us/en/develop/articles/openvino-relnotes.html). -**Operating Systems** +**Operating systems** -- CentOS 7.6, 64-bit +- Red Hat Enterprise Linux 8, 64-bit -## Included with Runtime Package +## Install OpenVINO Runtime -The following components are installed with the OpenVINO runtime package: +### Step 1: Set Up the Repository -| Component | Description| -|-----------|------------| -| [OpenVINO™ Runtime](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md)| The engine that runs a deep learning model. It includes a set of libraries for an easy inference integration into your applications. | -| [OpenCV*](https://docs.opencv.org/master/) | OpenCV* community version compiled for Intel® hardware. 
| -| Deep Learning Stream (DL Streamer) | Streaming analytics framework, based on GStreamer, for constructing graphs of media analytics components. For the DL Streamer documentation, see [DL Streamer Samples](@ref gst_samples_README), [API Reference](https://openvinotoolkit.github.io/dlstreamer_gst/), [Elements](https://github.com/openvinotoolkit/dlstreamer_gst/wiki/Elements), [Tutorial](https://github.com/openvinotoolkit/dlstreamer_gst/wiki/DL-Streamer-Tutorial). | - -## Install Packages - -## Set up the Repository - -> **NOTE**: You must be logged in as root to set up and install the repository. -
-Configure YUM with the OpenVINO repository to install OpenVINO. You have two options for this, using the `yum-config-manager` or manually by creating a text file and pointing YUM to the file. - -* **OPTION 1:** Import the `.repo` file using the `yum-config-manager`: - 1. `yum-utils` must be installed on your system. If it’s not currently installed, run the command: - ```sh - sudo yum install yum-utils +1. Create the YUM repo file in the `/tmp` directory as a normal user: ``` - 2. Add repository using the `yum-config-manager`: - ```sh - sudo yum-config-manager --add-repo https://yum.repos.intel.com/openvino/2021/setup/intel-openvino-2021.repo - ``` - 3. Import the gpg public key for the repository: - ```sh - sudo rpm --import https://yum.repos.intel.com/openvino/2021/setup/RPM-GPG-KEY-INTEL-OPENVINO-2021 - ``` - -* **OPTION 2:** Create the repository file manually: - - 1. Create the YUM repo file in the /tmp directory as a normal user: - ```sh - tee > /tmp/openvino-2021.repo << EOF - [intel-openvino-2021] - name=Intel(R) Distribution of OpenVINO 2021 - baseurl=https://yum.repos.intel.com/openvino/2021 + tee > /tmp/openvino-2022.repo << EOF + [OpenVINO] + name=Intel(R) Distribution of OpenVINO 2022 + baseurl=https://yum.repos.intel.com/openvino/2022 enabled=1 gpgcheck=1 - gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-INTEL-OPENVINO-2021 + repo_gpgcheck=1 + gpgkey=https://yum.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB EOF ``` - 2. Move the newly created openvino-2021.repo file to the YUM configuration directory /etc/yum.repos.d: - ```sh - sudo mv /tmp/openvino-2021.repo /etc/yum.repos.d - ``` - 3. Import the gpg public key for the repository: +2. Move the new openvino-2022.repo file to the YUM configuration directory `/etc/yum.repos.d`: ```sh - sudo rpm --import https://yum.repos.intel.com/openvino/2021/setup/RPM-GPG-KEY-INTEL-OPENVINO-2021 + sudo mv /tmp/openvino-2022.repo /etc/yum.repos.d ``` +3. 
Verify that the new repo is properly setup by running the following command: + ```sh + yum repolist | grep -i openvino + ``` + You will see the available list of packages. -### Verify that the new repo is properly setup -Run the following command: -```sh -yum repolist | grep -i openvino + +To list available OpenVINO packages, use the following command: +``` +yum list 'openvino*' ``` -Results: -```sh -intel-openvino-2021 Intel(R) Distribution of OpenVINO 2021 -``` - -### To list available OpenVINO packages -Use the following command: -```sh -yum list intel-openvino* -``` +### Step 2: Install OpenVINO Runtime Using the YUM Package Manager ---- - -## Install Runtime Packages Using the YUM Package Manager +Intel® Distribution of OpenVINO™ toolkit will be installed in: `/opt/intel/openvino_..` -Intel® OpenVINO will be installed in: `/opt/intel/openvino_..` -
A symlink will be created: `/opt/intel/openvino_` ---- +You can select one of the following procedures according to your need: -### To install the latest version -To install the full runtime version of the OpenVINO package: +#### To Install the Latest Version + +Run the following command: ```sh -sudo yum install intel-openvino-runtime-centos7 +sudo yum install openvino ``` ---- +#### To Install a Specific Version -### To install a specific version -To install the full runtime version of the OpenVINO package: +Run the following command: ```sh -sudo yum install intel-openvino-runtime-centos7-.. +sudo yum install openvino-.. ``` + For example: - ```sh -sudo yum install intel-openvino-runtime-centos7-2021.3.394 - ``` - ---- - -### To check for installed packages and version - -To check a specific version of an OpenVINO package: -```sh -yum list installed intel-openvino* +sudo yum install openvino-2022.1.0 ``` ---- +#### To Check for Installed Packages and Version -### To Uninstall a specific version - -To uninstall a specific full runtime package: +Run the following command: ```sh -sudo yum autoremove intel-openvino-runtime-centos-.. +yum list installed 'openvino*' ``` -**Additional Resources** -- Intel® Distribution of OpenVINO™ toolkit home page: [https://software.intel.com/en-us/openvino-toolkit](https://software.intel.com/en-us/openvino-toolkit) -- OpenVINO™ toolkit online documentation: [https://docs.openvino.ai](https://docs.openvino.ai) -- [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). -- [OpenVINO™ Runtime User Guide](../OV_Runtime_UG/OpenVINO_Runtime_User_Guide.md). -- For more information on Sample Applications, see the [Inference Engine Samples Overview](../OV_Runtime_UG/Samples_Overview.md). -- For IoT Libraries & Code Samples see the [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit). 
+#### To Uninstall the Latest Version +Run the following command: +```sh +sudo yum autoremove openvino +``` + +#### To Uninstall a Specific Version + +Run the following command: +```sh +sudo yum autoremove openvino-.. +``` + +### Step 3 (Optional): Install OpenCV from YUM + +OpenCV is necessary to run C++ demos from Open Model Zoo. Some OpenVINO samples can also extend their capabilities when compiled with OpenCV as a dependency. OpenVINO provides a package to install OpenCV from YUM: + +#### To Install the Latest Version of OpenCV + +Run the following command: +```sh +sudo yum install openvino-opencv +``` + +#### To Install a Specific Version of OpenCV + +Run the following command: +```sh +sudo yum install openvino-opencv-.. +``` + +### Step 4 (Optional): Install Software Dependencies + +After you have installed OpenVINO Runtime, if you decided to [install OpenVINO Model Development Tools](installing-model-dev-tools.md), make sure that you install external software dependencies first. + +Refer to Install External Software Dependencies for detailed steps. + +### Step 5 (Optional): Configure Inference on Non-CPU Devices + +@sphinxdirective + +.. tab:: GNA + + To enable the toolkit components to use Intel® Gaussian & Neural Accelerator (GNA) on your system, follow the steps in :ref:`GNA Setup Guide `. + +.. tab:: GPU + + To enable the toolkit components to use processor graphics (GPU) on your system, follow the steps in :ref:`GPU Setup Guide `. + +.. tab:: NCS 2 + + To perform inference on Intel® Neural Compute Stick 2 powered by the Intel® Movidius™ Myriad™ X VPU, follow the steps on :ref:`NCS2 Setup Guide `. + + +.. tab:: VPU + + To install and configure your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs, see the :ref:`VPU Configuration Guide `. + After configuration is done, you are ready to run the verification scripts with the HDDL Plugin for your Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. + + .. 
warning:: + While working with either HDDL or NCS, choose one of them as they cannot run simultaneously on the same machine. + +@endsphinxdirective + + +## What's Next? + +Now you may continue with the following tasks: + +* To convert models for use with OpenVINO, see [Model Optimizer Developer Guide](../MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md). +* See pre-trained deep learning models in our [Open Model Zoo](../model_zoo.md). +* Try out OpenVINO via [OpenVINO Notebooks](https://docs.openvino.ai/latest/notebooks/notebooks.html). +* To write your own OpenVINO™ applications, see [OpenVINO Runtime User Guide](../OV_Runtime_UG/openvino_intro.md). +* See sample applications in [OpenVINO™ Toolkit Samples Overview](../OV_Runtime_UG/Samples_Overview.md). + +## Additional Resources + +- Intel® Distribution of OpenVINO™ toolkit home page: +- For IoT Libraries & Code Samples, see [Intel® IoT Developer Kit](https://github.com/intel-iot-devkit). diff --git a/docs/install_guides/movidius-setup-guide.md b/docs/install_guides/movidius-setup-guide.md index e8c3a12832b..993d52dae57 100644 --- a/docs/install_guides/movidius-setup-guide.md +++ b/docs/install_guides/movidius-setup-guide.md @@ -4,15 +4,15 @@ The IEI Mustang-V100-MX8 is an OEM version of the Intel® Vision Accelerator Design with Intel® Movidius™ VPUs. -This guide assumes you have installed the [Mustang-V100-MX8](https://download.ieiworld.com/) and the [Intel® Distribution of OpenVINO™ Toolkit](https://software.intel.com/content/www/us/en/develop/tools/openvino-toolkit.html). +This guide assumes you have installed the [Mustang-V100-MX8](https://download.ieiworld.com/) and the [Intel® Distribution of OpenVINO™ toolkit](https://software.intel.com/content/www/us/en/develop/tools/openvino-toolkit.html). Instructions in this guide for configuring your accelerator include: -1. Installing the required IEI\* BSL reset software +1. Installing the required IEI BSL reset software 2. 
Configuration settings for the `hddldaemon` service -> **NOTE**: This guide does not apply to Uzel\* cards. +> **NOTE**: This guide does not apply to Uzel cards. -## IEI Reset Software Installation +## Installing IEI Reset Software Using the IEI Mustang-V100-MX8 requires downloading and installing the most current software for your system. @@ -21,14 +21,14 @@ Search for **Mustang-V100-MX8**. Download the appropriate software for your system, decompress the downloaded archive, enter the newly created directory, and run the install script: -On **Linux**\*: +On **Linux**: - Run the `install.sh script` with `sudo`, or as `root`. -On **Windows**\*, do one of the following:
+On **Windows**, do one of the following:
- **GUI**: Double-click `install.bat` - **CLI**: Open a console with administrator privileges, cd into the directory, and run `install.bat`. -## Mustang-V100-MX8 Service Configuration +## Configuring Mustang-V100-MX8 Service The `hddldaemon` is a system service, a binary executable that is run to manage the computational workload on the board. It is a required abstraction layer that handles inference, graphics processing, and any type of computation that should be run on the video processing units (VPUs). Depending on the board configuration, there can be 8 or 16 VPUs. @@ -36,21 +36,21 @@ The `hddldaemon` is a system service, a binary executable that is run to manage ### Conventions Used in This Document -`` refers to the following default OpenVINO™ Inference Engine directories: +`` refers to the following default OpenVINO™ Runtime directories: - **Linux:** ``` - /opt/intel/openvino_2022/inference_engine + /opt/intel/openvino_2022/runtime ``` - **Windows:** ``` -C:\Program Files (x86)\IntelSWTools\openvino\inference_engine +C:\Program Files (x86)\IntelSWTools\openvino\runtime ``` If you have installed OpenVINO™ in a different directory on your system, you will need to enter your unique directory path. ### Configuration File Location -`\external\hddl\config\hddl_service.config` +`\3rdparty\hddl\config\hddl_service.config` ### Service Configuration File Settings @@ -58,7 +58,7 @@ Below are some possible configuration options. > **NOTE**: After changing a configuration file, the `hddldaemon` must be restarted. -### Recommended Settings +#### Recommended Settings `device_snapshot_mode` Changes the output of the `hddldaemon` to display a table with individual VPU statistics. 
@@ -124,7 +124,7 @@ This setting reports the total FPS for the dispatching hddl_service (which will (default: `"true"`) -## Additional resources +## Additional Resources - [Intel Distribution of OpenVINO Toolkit home page](https://software.intel.com/en-us/openvino-toolkit) - [Intel Distribution of OpenVINO Toolkit documentation](https://docs.openvino.ai) diff --git a/docs/install_guides/pre-release-note.md b/docs/install_guides/pre-release-note.md new file mode 100644 index 00000000000..678b1f20224 --- /dev/null +++ b/docs/install_guides/pre-release-note.md @@ -0,0 +1,2 @@ + +> **NOTE**: This version is pre-release software and has not undergone full release validation or qualification. No support is offered on pre-release software and APIs/behavior are subject to change. It should NOT be incorporated into any production software/solution and instead should be used only for early testing and integration while awaiting a final release version of this software. diff --git a/docs/install_guides/pypi-openvino-dev.md b/docs/install_guides/pypi-openvino-dev.md index a7d39e61a70..977e21048bc 100644 --- a/docs/install_guides/pypi-openvino-dev.md +++ b/docs/install_guides/pypi-openvino-dev.md @@ -8,11 +8,11 @@ OpenVINO™ toolkit is a comprehensive toolkit for quickly developing applicatio | Component | Console Script | Description | |------------------|---------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [Model Optimizer](https://docs.openvino.ai/latest/openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide.html) | `mo` |**Model Optimizer** imports, converts, and optimizes models that were trained in popular frameworks to a format usable by 
Intel tools, especially the Inference Engine. 
Supported frameworks include Caffe\*, TensorFlow\*, MXNet\*, and ONNX\*. | +| [Model Optimizer](https://docs.openvino.ai/latest/openvino_docs_MO_DG_Deep_Learning_Model_Optimizer_DevGuide.html) | `mo` |**Model Optimizer** imports, converts, and optimizes models that were trained in popular frameworks to a format usable by OpenVINO components. 
Supported frameworks include Caffe\*, TensorFlow\*, MXNet\*, PaddlePaddle\*, and ONNX\*. | | [Benchmark Tool](https://docs.openvino.ai/latest/openvino_inference_engine_tools_benchmark_tool_README.html)| `benchmark_app` | **Benchmark Application** allows you to estimate deep learning inference performance on supported devices for synchronous and asynchronous modes. | | [Accuracy Checker](https://docs.openvino.ai/latest/omz_tools_accuracy_checker.html) and
[Annotation Converter](https://docs.openvino.ai/latest/omz_tools_accuracy_checker_annotation_converters.html) | `accuracy_check`
`convert_annotation` |**Accuracy Checker** is a deep learning accuracy validation tool that allows you to collect accuracy metrics against popular datasets. The main advantages of the tool are the flexibility of configuration and a set of supported datasets, preprocessing, postprocessing, and metrics.
**Annotation Converter** is a utility that prepares datasets for evaluation with Accuracy Checker. | | [Post-Training Optimization Tool](https://docs.openvino.ai/latest/pot_README.html)| `pot` |**Post-Training Optimization Tool** allows you to optimize trained models with advanced capabilities, such as quantization and low-precision optimizations, without the need to retrain or fine-tune models. Optimizations are also available through the [API](https://docs.openvino.ai/latest/pot_compression_api_README.html). | -| [Model Downloader and other Open Model Zoo tools](https://docs.openvino.ai/latest/omz_tools_downloader.html)| `omz_downloader`
`omz_converter`
`omz_quantizer`
`omz_info_dumper`| **Model Downloader** is a tool for getting access to the collection of high-quality and extremely fast pre-trained deep learning [public](https://docs.openvino.ai/latest/omz_models_group_public.html) and [Intel](https://docs.openvino.ai/latest/omz_models_group_intel.html)-trained models. These free pre-trained models can be used to speed up the development and production deployment process without training your own models. The tool downloads model files from online sources and, if necessary, patches them to make them more usable with Model Optimizer. A number of additional tools are also provided to automate the process of working with downloaded models:
**Model Converter** is a tool for converting Open Model Zoo models that are stored in an original deep learning framework format into the Inference Engine Intermediate Representation (IR) using Model Optimizer.
**Model Quantizer** is a tool for automatic quantization of full-precision models in the IR format into low-precision versions using the Post-Training Optimization Tool.
**Model Information Dumper** is a helper utility for dumping information about the models to a stable, machine-readable format. +| [Model Downloader and other Open Model Zoo tools](https://docs.openvino.ai/latest/omz_tools_downloader.html)| `omz_downloader`
`omz_converter`
`omz_quantizer`
`omz_info_dumper`| **Model Downloader** is a tool for getting access to the collection of high-quality and extremely fast pre-trained deep learning [public](https://docs.openvino.ai/latest/omz_models_group_public.html) and [Intel](https://docs.openvino.ai/latest/omz_models_group_intel.html)-trained models. These free pre-trained models can be used to speed up the development and production deployment process without training your own models. The tool downloads model files from online sources and, if necessary, patches them to make them more usable with Model Optimizer. A number of additional tools are also provided to automate the process of working with downloaded models:
**Model Converter** is a tool for converting Open Model Zoo models that are stored in an original deep learning framework format into the OpenVINO Intermediate Representation (IR) using Model Optimizer.
**Model Quantizer** is a tool for automatic quantization of full-precision models in the IR format into low-precision versions using the Post-Training Optimization Tool.
**Model Information Dumper** is a helper utility for dumping information about the models to a stable, machine-readable format. The developer package also installs the OpenVINO™ Runtime package as a dependency. @@ -102,7 +102,7 @@ For example, to install and configure the components for working with TensorFlow ``` You will see the help message for Model Optimizer if installation finished successfully. -- To verify that Inference Engine from the **runtime package** is available, run the command below: +- To verify that OpenVINO Runtime from the **runtime package** is available, run the command below: ```sh python -c "from openvino.runtime import Core" ``` @@ -110,6 +110,28 @@ For example, to install and configure the components for working with TensorFlow ## Troubleshooting + +### zsh: no matches found : openvino-dev[...] + +If you use zsh (Z shell) interpreter, that is the default shell for macOS starting with version 10.15 (Catalina), you may encounter the following error while installing `openvino-dev` package with extras: + +```sh +pip install openvino-dev[tensorflow2,mxnet,caffe] +zsh: no matches found: openvino-dev[tensorflow2,mxnet,caffe] +``` + +By default zsh interprets square brackets as an expression for pattern matching. To resolve this issue, you need to escape the command with quotes: + +```sh +pip install 'openvino-dev[tensorflow2,mxnet,caffe]' +``` + +To avoid such issues you can also disable globbing for PIP commands by defining an alias in `~/.zshrc` file: + +```sh +alias pip='noglob pip' +``` + ### Error: Microsoft Visual C++ 14.0 is required. Get it with "Build Tools for Visual Studio" On Windows* some dependencies may require compilation from source when installing. To resolve this issue, you need to install [Build Tools for Visual Studio* 2019](https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019) and repeat package installation. 
diff --git a/docs/install_guides/troubleshooting.md b/docs/install_guides/troubleshooting.md index 797bf86a10c..caf9225b60c 100644 --- a/docs/install_guides/troubleshooting.md +++ b/docs/install_guides/troubleshooting.md @@ -1,4 +1,4 @@ -# Troubleshooting {#openvino_docs_get_started_guide_troubleshooting} +# Troubleshooting Issues with OpenVINO™ Installation & Configuration {#openvino_docs_get_started_guide_troubleshooting} @@ -8,7 +8,7 @@ If you met proxy issues during the installation with Docker, please set up proxy settings for Docker. See the Proxy section in the [Install the DL Workbench from DockerHub*](https://docs.openvino.ai/latest/workbench_docs_Workbench_DG_Prerequisites.html#set-proxy) topic. -### Permission Errors for `/dev/shm` +### Permission Errors for /dev/shm If you encounter a permission error for files in `/dev/shm` (see `hddldaemon.log`). A possible cause is that the uid and gid of the container user are different from the uid and gid of the user who created `hddldaemon` service on the host. @@ -43,7 +43,7 @@ ${HDDL_INSTALL_DIR}/bin/bsl_reset ``` --- -### Get the "No space left on device" error while loading a network +### "No space left on device" error while loading a network When the application runs inference of a network with a big size(>4MB) of input/output or if the system is running out of the DMA buffer, the HDDL Plugin will fall back to use shared memory. In this case, if the application exits abnormally, the shared memory is not released automatically. @@ -53,7 +53,7 @@ sudo rm -f /dev/shm/hddl_* ``` --- -### How to solve the permission issue? 
+### Solutions to the permission issue Make sure that the following udev rules exist: - `/etc/udev/rules.d/97-myriad-usbboot.rules` @@ -67,7 +67,7 @@ sudo usermod -a -G users "$(whoami)" ``` --- -### `setup.sh` doesn't install the driver binaries to `/lib/modules` on CentOS systems +### setup.sh doesn't install the driver binaries to /lib/modules on CentOS systems As a temporary workaround, run the commands below to install the drivers. This issue will be fixed in future releases. @@ -128,7 +128,7 @@ sudo modprobe myd_ion Please contact your motherboard vendor to verify SMBUS pins are connected to the PCIe slot. --- -### Get "Error: ipc_connection_linux_UDS : bind() failed" in hddldaemon log. +### "Error: ipc_connection_linux_UDS : bind() failed" in hddldaemon log You may have run hddldaemon under another user. Run the command below and try again: ```sh @@ -136,7 +136,7 @@ sudo rm -rf /var/tmp/hddl_* ``` --- -### Get "I2C bus: SMBus I801 adapter at not found!" in hddldaemon log +### "I2C bus: SMBus I801 adapter at not found!" in hddldaemon log Run the following command to check if a SMBUS I801 adapter can be found: ```sh @@ -147,7 +147,7 @@ Then run: sudo modprobe i2c-i801 ``` --- -### Get "open /dev/ion failed!" in hddldaemon log +### "open /dev/ion failed!" in hddldaemon log Check if `myd_ion` kernel module is installed by running the following command: ```sh @@ -156,7 +156,7 @@ lsmod | grep myd_ion If you do not see any output from the command, reinstall the `myd_ion` module. --- -### Constantly get "\_name\_mapping open failed err=2,No such file or directory" in hddldaemon log +### Constantly getting "\_name\_mapping open failed err=2,No such file or directory" in hddldaemon log Check if myd_vsc kernel module is installed by running the following command: ```sh @@ -165,7 +165,7 @@ lsmod | grep myd_vsc If you do not see any output from the command reinstall the `myd_vsc` module. 
--- -### Get "Required key not available" when trying to install the `myd_ion` or `myd_vsc` modules +### "Required key not available" appears when trying to install the myd_ion or myd_vsc modules Run the following commands: ```sh diff --git a/docs/migration_ov_2_0/docs/common_inference_pipeline.md b/docs/migration_ov_2_0/docs/common_inference_pipeline.md deleted file mode 100644 index 7a524ea9008..00000000000 --- a/docs/migration_ov_2_0/docs/common_inference_pipeline.md +++ /dev/null @@ -1,161 +0,0 @@ -# OpenVINO™ Inference Pipeline {#openvino_inference_pipeline} - -Usually to inference network with the OpenVINO™ toolkit users need to do next steps: - 1. Create Core - 2. (Optional) Read model from the disk - 2.1. Configure Input and Output of the Model - 3. Load the Model to the Device - 4. Create an Inference Request - 5. Prepare Input - 6. Start Inference - 7. Process the Inference Results - -Code snippets below cover these steps and show how application code should be changed for migration to OpenVINO™ 2.0. - -## 1. Create Core - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:create_core - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:create_core - -## 2. (Optional) Read model from the disk - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:read_model - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:read_model - -Read model has the same structure as in the example from [OpenVINO™ Graph Construction](@ref openvino_graph_construction) guide. - -### 2.1 Configure Input and Output of the Model - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_inputs_outputs - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_inputs_outputs - -## 3. Load the Model to the Device - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:compile_model - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:compile_model - -## 4. 
Create an Inference Request - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:create_infer_request - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:create_infer_request - -## 5. Prepare input - -### IR v10 - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_input_tensor - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_input_tensor_v10 - -### IR v11 - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_input_tensor - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_input_tensor_aligned - -### ONNX - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_input_tensor - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_input_tensor_aligned - -### From Function - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_input_tensor - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_input_tensor_aligned - -## 6. Start Inference - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:inference - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:inference - - -## 7. 
Process the Inference Results - -### IR v10 - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_output_tensor - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_output_tensor_v10 - -### IR v11 - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_output_tensor - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_output_tensor_aligned - -### ONNX - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_output_tensor - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_output_tensor_aligned - -### From Function - -Inference Engine API: - -@snippet snippets/ie_common.cpp ie:get_output_tensor - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_common.cpp ov_api_2_0:get_output_tensor_aligned - diff --git a/docs/migration_ov_2_0/docs/graph_construction.md b/docs/migration_ov_2_0/docs/graph_construction.md deleted file mode 100644 index 8c7f22c17df..00000000000 --- a/docs/migration_ov_2_0/docs/graph_construction.md +++ /dev/null @@ -1,12 +0,0 @@ -# OpenVINO™ graph construction {#openvino_graph_construction} - -OpenVINO™ 2.0 includes nGraph engine in a common part. The `ngraph` namespace was changed to `ov`. -Code snippets below show how application code should be changed for migration to OpenVINO™ 2.0. - -nGraph API: - -@snippet snippets/ngraph.cpp ngraph:graph - -OpenVINO™ 2.0 API: - -@snippet snippets/ov_graph.cpp ov:graph diff --git a/docs/migration_ov_2_0/docs/intro.md b/docs/migration_ov_2_0/docs/intro.md deleted file mode 100644 index 90ac48cdff9..00000000000 --- a/docs/migration_ov_2_0/docs/intro.md +++ /dev/null @@ -1,24 +0,0 @@ -# OpenVINO™ API 2.0 Transition Guide {#openvino_2_0_transition_guide} - -@sphinxdirective - -.. 
toctree:: - :maxdepth: 1 - :hidden: - - openvino_inference_pipeline - openvino_graph_construction - -@endsphinxdirective - -The OpenVINO™ API 2.0 introduced in order to simplify migration from other frameworks and make the OpenVINO™ API more user-friendly. -The list with differences between APIs below: - - - OpenVINO™ API 2.0 uses tensor names or indexes to work with Inputs or Outputs, the old API works with operation names. - - Structures for Shapes, element types were changed. - - Naming style was changed. The old API uses CamelCaseStyle and OpenVINO™ API 2.0 uses snake_case for function names. - - Namespaces were aligned between components. - -Please look at next transition guides to understand how transit own application to OpenVINO™ API 2.0. - - [OpenVINO™ Graph Construction](graph_construction.md) - - [OpenVINO™ Common Inference pipeline](common_inference_pipeline.md) diff --git a/docs/model_server/README.md b/docs/model_server/README.md deleted file mode 100644 index 6bdd36eae29..00000000000 --- a/docs/model_server/README.md +++ /dev/null @@ -1,143 +0,0 @@ -# OpenVINO™ Model Server {#openvino_docs_ovms} - -OpenVINO™ Model Server (OVMS) is a scalable, high-performance solution for serving machine learning models optimized for Intel® architectures. -The server provides an inference service via gRPC or REST API - making it easy to deploy new algorithms and AI experiments using the same -architecture as [TensorFlow* Serving](https://github.com/tensorflow/serving) for any models trained in a framework that is supported -by [OpenVINO](https://software.intel.com/en-us/openvino-toolkit). - -The server implements gRPC and REST API framework with data serialization and deserialization using TensorFlow Serving API, - and OpenVINO™ as the inference execution provider. Model repositories may reside on a locally accessible file system (for example, NFS), - Google Cloud Storage\* (GCS), Amazon S3\*, MinIO\*, or Azure Blob Storage\*. 
- -OVMS is now implemented in C++ and provides much higher scalability compared to its predecessor in the Python version. -You can take advantage of all the power of Xeon® CPU capabilities or AI accelerators and expose it over the network interface. -Read the [release notes](https://github.com/openvinotoolkit/model_server/releases) to find out what's new in the C++ version. - -Review the [Architecture Concept](https://github.com/openvinotoolkit/model_server/blob/main/docs/architecture.md) document for more details. - -A few key features: -- Support for multiple frameworks. Serve models trained in popular formats such as Caffe\*, TensorFlow\*, MXNet\*, and ONNX*. -- Deploy new [model versions](https://github.com/openvinotoolkit/model_server/blob/main/docs/docker_container.md#model-version-policy) without changing client code. -- Support for AI accelerators including [Intel Movidius Myriad VPUs](../OV_Runtime_UG/supported_plugins/VPU.md), -[GPU](../OV_Runtime_UG/supported_plugins/GPU.md), and [HDDL](../OV_Runtime_UG/supported_plugins/HDDL.md). -- The server can be enabled both on [Bare Metal Hosts](https://github.com/openvinotoolkit/model_server/blob/main/docs/host.md) or in -[Docker* containers](https://github.com/openvinotoolkit/model_server/blob/main/docs/docker_container.md). -- [Kubernetes deployments](https://github.com/openvinotoolkit/model_server/blob/main/deploy). The server can be deployed in a Kubernetes cluster allowing the inference service to scale horizontally and ensure high availability. -- [Model reshaping](https://github.com/openvinotoolkit/model_server/blob/main/docs/docker_container.md#model-reshaping). The server supports reshaping models in runtime. -- [Model ensemble](https://github.com/openvinotoolkit/model_server/blob/main/docs/ensemble_scheduler.md) (preview). Connect multiple models to deploy complex processing solutions and reduce overhead of sending data back and forth. - -> **NOTE**: OVMS has been tested on CentOS\* and Ubuntu\*. 
Publicly released [Docker images](https://hub.docker.com/r/openvino/model_server) are based on CentOS. - -## Build OpenVINO Model Server - -1. Go to the root directory of the repository. - -2. Build the Docker image with the command below: -```bash -make docker_build -``` - -The command generates: -* Image tagged as `openvino/model_server:latest` with CPU, NCS, and HDDL support -* Image tagged as `openvino/model_server:latest-gpu` with CPU, NCS, HDDL, and iGPU support -* `.tar.gz` release package with OVMS binary and necessary libraries in the `./dist` directory. - -The release package is compatible with Linux machines on which `glibc` version is greater than or equal to the build image version. -For debugging, the command also generates an image with a suffix `-build`, namely `openvino/model_server-build:latest`. - -> **NOTE**: Images include OpenVINO 2021.1 release. - - -## Run OpenVINO Model Server - -Find a detailed description of how to use the OpenVINO Model Server in the [OVMS Quick Start Guide](https://github.com/openvinotoolkit/model_server/blob/main/docs/ovms_quickstart.md). 
- - -For more detailed guides on using the Model Server in various scenarios, visit the links below: - -* [Models repository configuration](https://github.com/openvinotoolkit/model_server/blob/main/docs/models_repository.md) - -* [Using a Docker container](https://github.com/openvinotoolkit/model_server/blob/main/docs/docker_container.md) - -* [Landing on bare metal or virtual machine](https://github.com/openvinotoolkit/model_server/blob/main/docs/host.md) - -* [Performance tuning](https://github.com/openvinotoolkit/model_server/blob/main/docs/performance_tuning.md) - -* [Model Ensemble Scheduler](https://github.com/openvinotoolkit/model_server/blob/main/docs/ensemble_scheduler.md) - - -## API Documentation - -### GRPC - -OpenVINO™ Model Server gRPC API is documented in the proto buffer files in [tensorflow_serving_api](https://github.com/tensorflow/serving/tree/r2.2/tensorflow_serving/apis). - -> **NOTE**: The implementations for `Predict`, `GetModelMetadata`, and `GetModelStatus` function calls are currently available. -> These are the most generic function calls and should address most of the usage scenarios. - -[Predict proto](https://github.com/tensorflow/serving/blob/r2.2/tensorflow_serving/apis/predict.proto) defines two message specifications: `PredictRequest` and `PredictResponse` used while calling Prediction endpoint. -* `PredictRequest` specifies information about the model spec, that is name and version, and a map of input data serialized via -[TensorProto](https://github.com/tensorflow/tensorflow/blob/r2.2/tensorflow/core/framework/tensor.proto) to a string format. -* `PredictResponse` includes a map of outputs serialized by -[TensorProto](https://github.com/tensorflow/tensorflow/blob/r2.2/tensorflow/core/framework/tensor.proto) and information about the used model spec. 
- -[Get Model Metadata proto](https://github.com/tensorflow/serving/blob/r2.2/tensorflow_serving/apis/get_model_metadata.proto) defines three message definitions used while calling Metadata endpoint: - `SignatureDefMap`, `GetModelMetadataRequest`, `GetModelMetadataResponse`. - - A function call `GetModelMetadata` accepts model spec information as input and returns Signature Definition content in the format similar to TensorFlow Serving. - -[Get Model Status proto](https://github.com/tensorflow/serving/blob/r2.2/tensorflow_serving/apis/get_model_status.proto) defines three message definitions used while calling Status endpoint: - `GetModelStatusRequest`, `ModelVersionStatus`, `GetModelStatusResponse` that report all exposed versions including their state in their lifecycle. - -Refer to the [example client code](https://github.com/openvinotoolkit/model_server/blob/main/example_client) to learn how to use this API and submit the requests using the gRPC interface. - -Using the gRPC interface is recommended for optimal performance due to its faster implementation of input data deserialization. It enables you to achieve lower latency, especially with larger input messages like images. - -### REST - -OpenVINO™ Model Server RESTful API follows the documentation from the [TensorFlow Serving REST API](https://www.tensorflow.org/tfx/serving/api_rest). - -Both row and column format of the requests are implemented. - -> **NOTE**: Just like with gRPC, only the implementations for `Predict`, `GetModelMetadata`, and `GetModelStatus` function calls are currently available. - -Only the numerical data types are supported. - -Review the exemplary clients below to find out more how to connect and run inference requests. - -REST API is recommended when the primary goal is in reducing the number of client side Python dependencies and simpler application code. 
- - -## Known Limitations - -* Currently, `Predict`, `GetModelMetadata`, and `GetModelStatus` calls are implemented using the TensorFlow Serving API. -* `Classify`, `Regress`, and `MultiInference` are not included. -* `Output_filter` is not effective in the `Predict` call. All outputs defined in the model are returned to the clients. - -## OpenVINO Model Server Contribution Policy - -* All contributed code must be compatible with the [Apache 2](https://www.apache.org/licenses/LICENSE-2.0) license. - -* All changes have to pass linter, unit, and functional tests. - -* All new features need to be covered by tests. - - -## References - -* [Speed and Scale AI Inference Operations Across Multiple Architectures - webinar recording](https://techdecoded.intel.io/essentials/speed-and-scale-ai-inference-operations-across-multiple-architectures/) - -* [OpenVINO™](https://software.intel.com/en-us/openvino-toolkit) - -* [TensorFlow Serving](https://github.com/tensorflow/serving) - -* [gRPC](https://grpc.io/) - -* [RESTful API](https://restfulapi.net/) - -* [Inference at Scale in Kubernetes](https://www.intel.ai/inference-at-scale-in-kubernetes) - - - ---- -\* Other names and brands may be claimed as the property of others. diff --git a/docs/model_zoo.md b/docs/model_zoo.md index 2d94c62ef79..6122a054d9a 100644 --- a/docs/model_zoo.md +++ b/docs/model_zoo.md @@ -20,6 +20,13 @@ omz_demos +.. 
toctree:: + :maxdepth: 1 + :hidden: + :caption: Model API + + omz_model_api_ovms_adapter + @endsphinxdirective diff --git a/docs/onnx_custom_op/CMakeLists.txt b/docs/onnx_custom_op/CMakeLists.txt deleted file mode 100644 index 2f2a77c58b7..00000000000 --- a/docs/onnx_custom_op/CMakeLists.txt +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (C) 2018-2022 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -# - -# [cmake:onnx_custom_op] -set(CMAKE_CXX_STANDARD 11) - -set(TARGET_NAME "onnx_custom_op") - -find_package(OpenVINO REQUIRED COMPONENTS ONNX) - -add_library(${TARGET_NAME} STATIC onnx_custom_op.cpp onnx_custom_op.hpp) - -target_link_libraries(${TARGET_NAME} PUBLIC openvino::core openvino::frontend::onnx) -# [cmake:onnx_custom_op] - -# Enable code style check -add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME}) diff --git a/docs/onnx_custom_op/custom_relu_model.prototxt b/docs/onnx_custom_op/custom_relu_model.prototxt deleted file mode 100644 index 3845cc00ce2..00000000000 --- a/docs/onnx_custom_op/custom_relu_model.prototxt +++ /dev/null @@ -1,52 +0,0 @@ -ir_version: 3 -producer_name: "nGraph ONNX Importer" -graph { - node { - input: "in" - output: "out" - name: "customrelu" - op_type: "CustomRelu" - domain: "com.example" - attribute { - name: "alpha" - type: FLOAT - f: 2 - } - attribute { - name: "beta" - type: FLOAT - f: 3 - } - } - name: "custom relu graph" - input { - name: "in" - type { - tensor_type { - elem_type: 1 - shape { - dim { - dim_value: 8 - } - } - } - } - } - output { - name: "out" - type { - tensor_type { - elem_type: 1 - shape { - dim { - dim_value: 8 - } - } - } - } - } -} -opset_import { - domain: "com.example" - version: 1 -} diff --git a/docs/onnx_custom_op/onnx_custom_op.cpp b/docs/onnx_custom_op/onnx_custom_op.cpp deleted file mode 100644 index 399a6d2b4a3..00000000000 --- a/docs/onnx_custom_op/onnx_custom_op.cpp +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (C) 2018-2022 Intel Corporation -// 
SPDX-License-Identifier: Apache-2.0 -// - -//! [onnx_custom_op:headers] -// onnx_import/onnx_utils.hpp provides ngraph::onnx_import::register_operator function, that registers operator in ONNX importer's set. -#include -// ngraph/opsets/opset5.hpp provides the declaration of predefined nGraph operator set -#include -//! [onnx_custom_op:headers] - -void register_custom_relu_operator() { - // CustomRelu is defined as follows: - // x >= 0 => f(x) = x * alpha - // x < 0 => f(x) = x * beta - -//! [onnx_custom_op:register_operator] - ngraph::onnx_import::register_operator( - "CustomRelu", 1, "com.example", [](const ngraph::onnx_import::Node& onnx_node) -> ngraph::OutputVector { - namespace opset = ngraph::opset5; - - ngraph::OutputVector ng_inputs{onnx_node.get_ng_inputs()}; - const ngraph::Output& data = ng_inputs.at(0); - // create constant node with a single element that's equal to zero - std::shared_ptr zero_node = opset::Constant::create(data.get_element_type(), ngraph::Shape{}, {0}); - // create a negative map for 'data' node, 1 for negative values , 0 for positive values or zero - // then convert it from boolean type to `data.get_element_type()` - std::shared_ptr negative_map = std::make_shared( - std::make_shared(data, zero_node), data.get_element_type()); - // create a positive map for 'data' node, 0 for negative values , 1 for positive values or zero - // then convert it from boolean type to `data.get_element_type()` - std::shared_ptr positive_map = std::make_shared( - std::make_shared(data, zero_node), data.get_element_type()); - - // fetch alpha and beta attributes from ONNX node - float alpha = onnx_node.get_attribute_value("alpha", 1); // if 'alpha' attribute is not provided in the model, then the default value is 1 - float beta = onnx_node.get_attribute_value("beta"); - // create constant node with a single element 'alpha' with type f32 - std::shared_ptr alpha_node = opset::Constant::create(ngraph::element::f32, ngraph::Shape{}, {alpha}); - // create 
constant node with a single element 'beta' with type f32 - std::shared_ptr beta_node = opset::Constant::create(ngraph::element::f32, ngraph::Shape{}, {beta}); - - return { - std::make_shared( - std::make_shared(alpha_node, std::make_shared(data, positive_map)), - std::make_shared(beta_node, std::make_shared(data, negative_map)) - ) - }; - }); -//! [onnx_custom_op:register_operator] -} - -void unregister_custom_relu_operator() { -//! [onnx_custom_op:unregister_operator] - ngraph::onnx_import::unregister_operator("CustomRelu", 1, "com.example"); -//! [onnx_custom_op:unregister_operator] -} diff --git a/docs/onnx_custom_op/onnx_custom_op.hpp b/docs/onnx_custom_op/onnx_custom_op.hpp deleted file mode 100644 index 3554226da6b..00000000000 --- a/docs/onnx_custom_op/onnx_custom_op.hpp +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright (C) 2018-2022 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#pragma once - -void register_custom_relu_operator(); -void unregister_custom_relu_operator(); diff --git a/docs/ops/activation/SoftSign_9.md b/docs/ops/activation/SoftSign_9.md new file mode 100644 index 00000000000..4b0b2a9d715 --- /dev/null +++ b/docs/ops/activation/SoftSign_9.md @@ -0,0 +1,48 @@ +# SoftSign {#openvino_docs_ops_activation_SoftSign_9} + +**Versioned name**: *SoftSign-9* + +**Category**: *Activation function* + +**Short description**: *SoftSign* performs element-wise activation on a given input tensor. + +**Detailed description**: + +*SoftSign* operation is introduced in this [article](https://arxiv.org/abs/2010.09458). + +*SoftSign Activation Function* is a neuron activation function based on the mathematical function: + +\f[ +SoftSign(x) = \frac{x}{1+|x|} +\f] + +**Inputs**: + +* **1**: `data`. Input tensor of type *T* + +**Outputs**: + +* **1**: The resulting tensor of the same shape and type as the input tensor. + +**Types**: + +* **T**: Arbitrary supported floating-point type. 
+ +**Example** + +```xml + + + + 256 + 56 + + + + + 256 + 56 + + + +``` \ No newline at end of file diff --git a/docs/ops/opset.md b/docs/ops/opset.md index af166242766..48e4d1ef01e 100644 --- a/docs/ops/opset.md +++ b/docs/ops/opset.md @@ -5,7 +5,8 @@ .. toctree:: :maxdepth: 1 :hidden: - + + openvino_docs_ops_opset9 openvino_docs_ops_opset8 openvino_docs_ops_opset7 openvino_docs_ops_opset6 @@ -14,24 +15,24 @@ openvino_docs_ops_opset3 openvino_docs_ops_opset2 openvino_docs_ops_opset1 - + @endsphinxdirective -According to capabilities of supported deep learning frameworks and hardware capabilities of a target inference device, all operations are combined into operations sets each fully supported in a specific version of OpenVINO™ toolkit. +According to capabilities of supported deep learning frameworks and hardware capabilities of a target inference device, all operations are combined into operations sets each fully supported in a specific version of OpenVINO™ toolkit. -This topic provides a complete list of available sets of operations supported in different versions of OpenVINO™ toolkit. It's highly recommended to use the actual version of the operations set for a particular release. For a list of operations included into an operations set, click a link in the table. +This topic provides a complete list of available sets of operations supported in different versions of OpenVINO™ toolkit. Use the relevant version of the operations set for a particular release. For a list of operations included into an operations set, click a link in the table. 
-| OpenVINO™ Version | Actual Operations Set | -| :---------------- | :------------------------------- | +| OpenVINO™ Version | Actual Operations Set | +| :---------------- | :------------------------------- | | 2022.1 | [opset8](opset8.md) | -| 2021.4 | [opset7](opset7.md) | -| 2021.3 | [opset6](opset6.md) | -| 2021.2 | [opset5](opset5.md) | -| 2021.1 | [opset4](opset4.md) | +| 2021.4 | [opset7](opset7.md) | +| 2021.3 | [opset6](opset6.md) | +| 2021.2 | [opset5](opset5.md) | +| 2021.1 | [opset4](opset4.md) | | 2020.4 | [opset3](opset3.md) | | 2020.3 | [opset2](opset2.md) | -| 2020.2 | [opset2](opset2.md) | -| 2020.1 | [opset1](opset1.md) | +| 2020.2 | [opset2](opset2.md) | +| 2020.1 | [opset1](opset1.md) | ## See Also [Deep Learning Network Intermediate Representation and Operations Sets in OpenVINO™](../MO_DG/IR_and_opsets.md) diff --git a/docs/ops/opset1.md b/docs/ops/opset1.md index 3eee791065d..fc4db9ce049 100644 --- a/docs/ops/opset1.md +++ b/docs/ops/opset1.md @@ -3,7 +3,7 @@ This specification document describes `opset1` operation set supported in OpenVINO. Support for each particular operation from the list below depends on the capabilities available in a inference plugin and may vary among different hardware platforms and devices. Examples of operation instances are expressed as IR V10 xml -snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding nGraph operation classes +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes declared in `namespace opset1`. diff --git a/docs/ops/opset2.md b/docs/ops/opset2.md index 04f9dfe048f..3ff00c6b762 100644 --- a/docs/ops/opset2.md +++ b/docs/ops/opset2.md @@ -3,7 +3,7 @@ This specification document describes `opset2` operation set supported in OpenVINO. 
Support for each particular operation from the list below depends on the capabilities available in a inference plugin and may vary among different hardware platforms and devices. Examples of operation instances are expressed as IR V10 xml -snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding nGraph operation classes +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes declared in `namespace opset2`. diff --git a/docs/ops/opset3.md b/docs/ops/opset3.md index 525e25d3449..dfdf64710be 100644 --- a/docs/ops/opset3.md +++ b/docs/ops/opset3.md @@ -3,7 +3,7 @@ This specification document describes `opset3` operation set supported in OpenVINO. Support for each particular operation from the list below depends on the capabilities available in a inference plugin and may vary among different hardware platforms and devices. Examples of operation instances are expressed as IR V10 xml -snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding nGraph operation classes +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes declared in `namespace opset3`. diff --git a/docs/ops/opset4.md b/docs/ops/opset4.md index b20fe4ac619..96e864bb5ae 100644 --- a/docs/ops/opset4.md +++ b/docs/ops/opset4.md @@ -3,7 +3,7 @@ This specification document describes `opset4` operation set supported in OpenVINO. Support for each particular operation from the list below depends on the capabilities available in a inference plugin and may vary among different hardware platforms and devices. Examples of operation instances are expressed as IR V10 xml -snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding nGraph operation classes +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes declared in `namespace opset4`. 
diff --git a/docs/ops/opset5.md b/docs/ops/opset5.md index f980a96a043..d0c6653a0c6 100644 --- a/docs/ops/opset5.md +++ b/docs/ops/opset5.md @@ -3,7 +3,7 @@ This specification document describes `opset5` operation set supported in OpenVINO. Support for each particular operation from the list below depends on the capabilities available in a inference plugin and may vary among different hardware platforms and devices. Examples of operation instances are expressed as IR V10 xml -snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding nGraph operation classes +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes declared in `namespace opset5`. diff --git a/docs/ops/opset6.md b/docs/ops/opset6.md index 3154484d56e..a2f35e51834 100644 --- a/docs/ops/opset6.md +++ b/docs/ops/opset6.md @@ -3,7 +3,7 @@ This specification document describes `opset6` operation set supported in OpenVINO. Support for each particular operation from the list below depends on the capabilities available in a inference plugin and may vary among different hardware platforms and devices. Examples of operation instances are expressed as IR V10 xml -snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding nGraph operation classes +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes declared in `namespace opset6`. diff --git a/docs/ops/opset7.md b/docs/ops/opset7.md index 8a786fc2b39..95a0734fa89 100644 --- a/docs/ops/opset7.md +++ b/docs/ops/opset7.md @@ -3,7 +3,7 @@ This specification document describes the `opset7` operation set supported in OpenVINO™. Support for each particular operation from the list below depends on the capabilities available in an inference plugin and may vary among different hardware platforms and devices. Examples of operation instances are provided as IR V10 xml -snippets. 
Such IR is generated by the Model Optimizer. The semantics match corresponding nGraph operation classes +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes +declared in `namespace opset7`. diff --git a/docs/ops/opset8.md b/docs/ops/opset8.md index a6274cdf968..70a9e98fecb 100644 --- a/docs/ops/opset8.md +++ b/docs/ops/opset8.md @@ -3,7 +3,7 @@ This specification document describes the `opset8` operation set supported in OpenVINO™. Support for each particular operation from the list below depends on the capabilities of an inference plugin and may vary among different hardware platforms and devices. Examples of operation instances are provided as IR V10 xml -snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding nGraph operation classes +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes declared in `namespace opset8`. diff --git a/docs/ops/opset9.md b/docs/ops/opset9.md new file mode 100644 index 00000000000..803fd4647bc --- /dev/null +++ b/docs/ops/opset9.md @@ -0,0 +1,181 @@ +# opset9 {#openvino_docs_ops_opset9} + +This specification document describes the `opset9` operation set supported in OpenVINO™. +Support for each particular operation from the list below depends on the capabilities of an inference plugin +and may vary among different hardware platforms and devices. Examples of operation instances are provided as IR V10 xml +snippets. Such IR is generated by the Model Optimizer. The semantics match corresponding OpenVINO operation classes +declared in `namespace opset9`. 
+ + +## Table of Contents + +* [Abs](arithmetic/Abs_1.md) +* [Acos](arithmetic/Acos_1.md) +* [Acosh](arithmetic/Acosh_3.md) +* [AdaptiveAvgPool](pooling/AdaptiveAvgPool_8.md) +* [AdaptiveMaxPool](pooling/AdaptiveMaxPool_8.md) +* [Add](arithmetic/Add_1.md) +* [Asin](arithmetic/Asin_1.md) +* [Asinh](arithmetic/Asinh_3.md) +* [Assign](infrastructure/Assign_3.md) +* [Atan](arithmetic/Atan_1.md) +* [Atanh](arithmetic/Atanh_3.md) +* [AvgPool](pooling/AvgPool_1.md) +* [BatchNormInference](normalization/BatchNormInference_5.md) +* [BatchToSpace](movement/BatchToSpace_2.md) +* [BinaryConvolution](convolution/BinaryConvolution_1.md) +* [Broadcast](movement/Broadcast_3.md) +* [Bucketize](condition/Bucketize_3.md) +* [CTCGreedyDecoder](sequence/CTCGreedyDecoder_1.md) +* [CTCGreedyDecoderSeqLen](sequence/CTCGreedyDecoderSeqLen_6.md) +* [CTCLoss](sequence/CTCLoss_4.md) +* [Ceiling](arithmetic/Ceiling_1.md) +* [Clamp](activation/Clamp_1.md) +* [Concat](movement/Concat_1.md) +* [Constant](infrastructure/Constant_1.md) +* [Convert](type/Convert_1.md) +* [ConvertLike](type/ConvertLike_1.md) +* [Convolution](convolution/Convolution_1.md) +* [ConvolutionBackpropData](convolution/ConvolutionBackpropData_1.md) +* [Cos](arithmetic/Cos_1.md) +* [Cosh](arithmetic/Cosh_1.md) +* [CumSum](arithmetic/CumSum_3.md) +* [DeformableConvolution](convolution/DeformableConvolution_8.md) +* [DeformablePSROIPooling](detection/DeformablePSROIPooling_1.md) +* [DepthToSpace](movement/DepthToSpace_1.md) +* [DetectionOutput](detection/DetectionOutput_8.md) +* [DFT](signals/DFT_7.md) +* [Divide](arithmetic/Divide_1.md) +* [Einsum](matrix/Einsum_7.md) +* [Elu](activation/Elu_1.md) +* [EmbeddingBagOffsetsSum](sparse/EmbeddingBagOffsetsSum_3.md) +* [EmbeddingBagPackedSum](sparse/EmbeddingBagPackedSum_3.md) +* [EmbeddingSegmentsSum](sparse/EmbeddingSegmentsSum_3.md) +* [Equal](comparison/Equal_1.md) +* [Erf](arithmetic/Erf_1.md) +* [Exp](activation/Exp_1.md) +* 
[ExperimentalDetectronDetectionOutput_6](detection/ExperimentalDetectronDetectionOutput_6.md) +* [ExperimentalDetectronGenerateProposalsSingleImage_6](detection/ExperimentalDetectronGenerateProposalsSingleImage_6.md) +* [ExperimentalDetectronPriorGridGenerator_6](detection/ExperimentalDetectronPriorGridGenerator_6.md) +* [ExperimentalDetectronROIFeatureExtractor_6](detection/ExperimentalDetectronROIFeatureExtractor_6.md) +* [ExperimentalDetectronTopKROIs_6](sort/ExperimentalDetectronTopKROIs_6.md) +* [ExtractImagePatches](movement/ExtractImagePatches_3.md) +* [FakeQuantize](quantization/FakeQuantize_1.md) +* [Floor](arithmetic/Floor_1.md) +* [FloorMod](arithmetic/FloorMod_1.md) +* [Gather](movement/Gather_8.md) +* [GatherElements](movement/GatherElements_6.md) +* [GatherND](movement/GatherND_8.md) +* [GatherTree](movement/GatherTree_1.md) +* [Gelu](activation/GELU_7.md) +* [Greater](comparison/Greater_1.md) +* [GreaterEqual](comparison/GreaterEqual_1.md) +* [GRN](normalization/GRN_1.md) +* [GroupConvolution](convolution/GroupConvolution_1.md) +* [GroupConvolutionBackpropData](convolution/GroupConvolutionBackpropData_1.md) +* [GRUCell](sequence/GRUCell_3.md) +* [GRUSequence](sequence/GRUSequence_5.md) +* [HardSigmoid](activation/HardSigmoid_1.md) +* [HSigmoid](activation/HSigmoid_5.md) +* [HSwish](activation/HSwish_4.md) +* [IDFT](signals/IDFT_7.md) +* [I420toBGR](image/I420toBGR_8.md) +* [I420toRGB](image/I420toRGB_8.md) +* [If](condition/If_8.md) +* [Interpolate](image/Interpolate_4.md) +* [IRDFT](signals/IRDFT_9.md) +* [Less](comparison/Less_1.md) +* [LessEqual](comparison/LessEqual_1.md) +* [Log](arithmetic/Log_1.md) +* [LogicalAnd](logical/LogicalAnd_1.md) +* [LogicalNot](logical/LogicalNot_1.md) +* [LogicalOr](logical/LogicalOr_1.md) +* [LogicalXor](logical/LogicalXor_1.md) +* [LogSoftmax](activation/LogSoftmax_5.md) +* [Loop](infrastructure/Loop_5.md) +* [LRN](normalization/LRN_1.md) +* [LSTMCell](sequence/LSTMCell_1.md) +* 
[LSTMSequence](sequence/LSTMSequence_1.md) +* [MatMul](matrix/MatMul_1.md) +* [MatrixNMS](sort/MatrixNMS_8.md) +* [MaxPool](pooling/MaxPool_8.md) +* [Maximum](arithmetic/Maximum_1.md) +* [Minimum](arithmetic/Minimum_1.md) +* [Mish](activation/Mish_4.md) +* [Mod](arithmetic/Mod_1.md) +* [MVN](normalization/MVN_6.md) +* [MulticlassNMS](sort/MulticlassNMS_8.md) +* [Multiply](arithmetic/Multiply_1.md) +* [Negative](arithmetic/Negative_1.md) +* [NonMaxSuppression](sort/NonMaxSuppression_5.md) +* [NonZero](condition/NonZero_3.md) +* [NormalizeL2](normalization/NormalizeL2_1.md) +* [NotEqual](comparison/NotEqual_1.md) +* [NV12toBGR](image/NV12toBGR_8.md) +* [NV12toRGB](image/NV12toRGB_8.md) +* [OneHot](sequence/OneHot_1.md) +* [Pad](movement/Pad_1.md) +* [Parameter](infrastructure/Parameter_1.md) +* [Power](arithmetic/Power_1.md) +* [PReLU](activation/PReLU_1.md) +* [PriorBoxClustered](detection/PriorBoxClustered_1.md) +* [PriorBox](detection/PriorBox_8.md) +* [Proposal](detection/Proposal_4.md) +* [PSROIPooling](detection/PSROIPooling_1.md) +* [RandomUniform](generation/RandomUniform_8.md) +* [Range](generation/Range_4.md) +* [RDFT](signals/RDFT_9.md) +* [ReLU](activation/ReLU_1.md) +* [ReadValue](infrastructure/ReadValue_3.md) +* [ReduceL1](reduction/ReduceL1_4.md) +* [ReduceL2](reduction/ReduceL2_4.md) +* [ReduceLogicalAnd](reduction/ReduceLogicalAnd_1.md) +* [ReduceLogicalOr](reduction/ReduceLogicalOr_1.md) +* [ReduceMax](reduction/ReduceMax_1.md) +* [ReduceMean](reduction/ReduceMean_1.md) +* [ReduceMin](reduction/ReduceMin_1.md) +* [ReduceProd](reduction/ReduceProd_1.md) +* [ReduceSum](reduction/ReduceSum_1.md) +* [RegionYolo](detection/RegionYolo_1.md) +* [ReorgYolo](detection/ReorgYolo_1.md) +* [Reshape](shape/Reshape_1.md) +* [Result](infrastructure/Result_1.md) +* [ReverseSequence](movement/ReverseSequence_1.md) +* [RNNCell](sequence/RNNCell_3.md) +* [RNNSequence](sequence/RNNSequence_5.md) +* [ROIAlign](detection/ROIAlign_3.md) +* 
[ROIPooling](detection/ROIPooling_1.md) +* [Roll](movement/Roll_7.md) +* [Round](arithmetic/Round_5.md) +* [ScatterElementsUpdate](movement/ScatterElementsUpdate_3.md) +* [ScatterNDUpdate](movement/ScatterNDUpdate_3.md) +* [ScatterUpdate](movement/ScatterUpdate_3.md) +* [Select](condition/Select_1.md) +* [Selu](activation/Selu_1.md) +* [ShapeOf](shape/ShapeOf_3.md) +* [ShuffleChannels](movement/ShuffleChannels_1.md) +* [Sigmoid](activation/Sigmoid_1.md) +* [Sign](arithmetic/Sign_1.md) +* [Sin](arithmetic/Sin_1.md) +* [Sinh](arithmetic/Sinh_1.md) +* [Slice](movement/Slice_8.md) +* [SoftMax](activation/SoftMax_8.md) +* [SoftPlus](activation/SoftPlus_4.md) +* [SoftSign](activation/SoftSign_9.md) +* [SpaceToBatch](movement/SpaceToBatch_2.md) +* [SpaceToDepth](movement/SpaceToDepth_1.md) +* [Split](movement/Split_1.md) +* [Sqrt](arithmetic/Sqrt_1.md) +* [SquaredDifference](arithmetic/SquaredDifference_1.md) +* [Squeeze](shape/Squeeze_1.md) +* [StridedSlice](movement/StridedSlice_1.md) +* [Subtract](arithmetic/Subtract_1.md) +* [Swish](activation/Swish_4.md) +* [Tan](arithmetic/Tan_1.md) +* [Tanh](arithmetic/Tanh_1.md) +* [TensorIterator](infrastructure/TensorIterator_1.md) +* [Tile](movement/Tile_1.md) +* [TopK](sort/TopK_3.md) +* [Transpose](movement/Transpose_1.md) +* [Unsqueeze](shape/Unsqueeze_1.md) +* [VariadicSplit](movement/VariadicSplit_1.md) diff --git a/docs/ops/sequence/RNNCell_3.md b/docs/ops/sequence/RNNCell_3.md index 58f1b9ddd87..70ee0d9c8e0 100644 --- a/docs/ops/sequence/RNNCell_3.md +++ b/docs/ops/sequence/RNNCell_3.md @@ -57,11 +57,11 @@ Formula: * **2**: `H` - 2D tensor of type *T* `[batch_size, hidden_size]`, initial hidden state. **Required.** -* **3**: `W` - 2D tensor tensor of type *T* `[hidden_size, input_size]`, the weights for matrix multiplication. **Required.** +* **3**: `W` - 2D tensor of type *T* `[hidden_size, input_size]`, the weights for matrix multiplication. 
**Required.** -* **4**: `R` - 2D tensor tensor of type *T* `[hidden_size, hidden_size]`, the recurrence weights for matrix multiplication. **Required.** +* **4**: `R` - 2D tensor of type *T* `[hidden_size, hidden_size]`, the recurrence weights for matrix multiplication. **Required.** -* **5**: `B` 1D tensor tensor of type *T* `[hidden_size]`, the sum of biases (weights and recurrence weights). **Required.** +* **5**: `B` - 1D tensor of type *T* `[hidden_size]`, the sum of biases (weights and recurrence weights). **Required.** **Outputs** diff --git a/docs/ops/signals/IRDFT_9.md b/docs/ops/signals/IRDFT_9.md new file mode 100644 index 00000000000..8e4e940d3e5 --- /dev/null +++ b/docs/ops/signals/IRDFT_9.md @@ -0,0 +1,218 @@ +# Inverse Discrete complex-to-real Fourier Transformation (IRDFT) {#openvino_docs_ops_signals_IRDFT_9} + +**Versioned name**: *IRDFT-9* + +**Category**: *Signal processing* + +**Short description**: *IRDFT* operation performs the inverse complex-to-real discrete Fourier transformation of the input tensor by specified dimensions. + +**Attributes**: + + No attributes available. + +**Inputs** + +* **1**: `data` - Input tensor of type *T* with data for the IRDFT transformation. The last dimension of the input tensor must be equal to 2, that is the input tensor shape must have the form `[D_0, D_1, ..., D_{N-1}, 2]`, representing the real and imaginary components of complex numbers in `[:, ..., :, 0]` and in `[:, ..., :, 1]` correspondingly. **Required.** +* **2**: `axes` - 1D tensor of type *T_IND* specifying dimension indices where IRDFT is applied, and `axes` is any unordered list of indices of different dimensions of the input tensor, for example, `[0, 4]`, `[4, 0]`, `[4, 2, 1]`, `[1, 2, 3]`, `[-3, 0, -2]`. These indices should be integers from `-(r - 1)` to `(r - 2)` inclusively, where `r = rank(data)`. A negative axis `a` is interpreted as an axis `r - 1 + a`. Other dimensions do not change. 
The order of elements in the `axes` attribute matters, and is mapped directly to elements in the third input `signal_size`. **Required.** +* **NOTE**: The following constraint must be satisfied: `rank(data) >= len(axes) + 1 and (rank(data) - 1) not in axes and (-1) not in axes`. +* **3**: `signal_size` - 1D tensor of type *T_SIZE* describing signal size with respect to axes from the input `axes`. If `signal_size[i] == -1`, then IRDFT is calculated for full size of the axis `axes[i]`. If `signal_size[i] > data_shape[: r - 1][axes[i]]`, then input data is zero-padded with respect to the axis `axes[i]` at the end. Finally, if `signal_size[i] < data_shape[: r - 1][axes[i]]`, then input data is trimmed with respect to the axis `axes[i]`. More precisely, if `signal_size[i] < data_shape[: r - 1][axes[i]]`, the slice `0: signal_size[i]` of the axis `axes[i]` is considered. Optionally, with default value `[data_shape[: r - 1][a] for a in axes]`. +* **NOTE**: If the input `signal_size` is specified, then the size of `signal_size` must be the same as the size of `axes`. + +**Outputs** + +* **1**: Resulting tensor with elements of the same type as input `data` tensor and with rank `r - 1`, where `r = rank(data)`. The shape of the output has the form `[S_0, S_1, ..., S_{r-2}]`, where all `S_a` are calculated as follows: + +1. Calculate `normalized_axes`, where each `normalized_axes[i] = axes[i]`, if `axes[i] >= 0`, and `normalized_axes[i] = axes[i] + r - 1` otherwise. + +2. If `a not in normalized_axes`, then `S_a = data_shape[a]`. + +3. If `a in normalized_axes`, then `a = normalized_axes[i]` for some `i`. In such case, `S_a = 2 * (data_shape[a] - 1)` if the `signal_size` input is not specified, or, if it is specified, `signal_size[i] = -1`; and `S_a = signal_size[a]` otherwise. 
+ + When `i != len(normalized_axes) - 1`, `S_a` is calculated as `S_a = data_shape[a]` if the `signal_size` input is not specified, or, if it is specified, `signal_size[i] = -1`; and `S_a = signal_size[a]` otherwise. + + When `i = len(normalized_axes) - 1`, `S_a` is calculated as `S_a = 2 * (data_shape[a] - 1)` if the `signal_size` input is not specified, or, if it is specified, `signal_size[i] = -1`; and `S_a = signal_size[a]` otherwise. + +**Types** + +* *T*: any supported floating-point type. + +* *T_IND*: `int64` or `int32`. + +* *T_SIZE*: `int64` or `int32`. + +**Detailed description**: *IRDFT* performs the discrete Fourier transformation of the input tensor, according to the following rules. + +For simplicity, assume that an input tensor `A` has the shape `[B_0, ..., B_{k-1}, M_0, ..., M_{q-1}, 2]`, `axes=[k,...,k + q - 1]`, and `signal_size=[S_0,...,S_{q-1}]`. + +Let `D` be a value of the input tensor `A`. + +Next, put +\f[X[j_0,\dots,j_{k-1},j_k,\dots,j_{k+q-1}]=D[j_0,\dots,j_{k-1},j_k,\dots,j_{k+q-1},0]+iD[j_0,\dots,j_{k-1},j_k,\dots,j_{k+q-1},1]\f] +for all indices `j_0,...,j_{k+q-1}`, where `i` is an imaginary unit, that is `X` is a complex tensor. + +Define the complex tensor `F` with the shape `[B_0, ..., B_{k-1}, 2 * (M_0 - 1), ..., 2 * (M_{q-1} - 1)]` using the formula +\f[F[j_0,\dots,j_{k-1},j_k,\dots,j_p,\dots,j_{k+q-1}] = \begin{cases}X[j_0,\dots,j_{k-1},j_k,\dots,j_p,\dots,j_{k+q-1}],\text{ when }j_p=0,\dots,M_p-1;\\ \overline{X[j_0,\dots,j_{k-1},j_k,\dots,2(M_{p-1} - 1) - j_p,\dots,j_{k+q-1}]},\text{ otherwise.}\end{cases}\f] + +Construct the complex tensor `G` with the shape `[B_0, ..., B_{k-1}, S_0, ..., S_{q-1}]` by the following way. If `S_a > 2 * (M_a - 1)`, then the axis `k + a` of `F` will be padded by zeros; if `S_a < 2 * (M_a - 1)`, then the axis `k + a` of `F` will be trimmed, that is, we will consider only the slice `0: S_a` of this axis; finally, if `S_a = 2 * (M_a - 1)`, then we consider the full axis `k + a` of `F`. 
+ +Let `Y` be a complex tensor with the shape `[B_0, ..., B_{k-1}, S_0, ..., S_{q-1}]` such that +\f[Y[n_0,\dots,n_{k-1},m_0,\dots,m_{q-1}]=\frac{1}{\prod\limits_{j=0}^{q-1}S_j}\sum\limits_{p_0=0}^{S_0}\cdots\sum\limits_{p_{q-1}=0}^{S_{q-1}}X[n_0,\dots,n_{k-1},j_0,\dots,j_{q-1}]\exp\left(2\pi i\sum\limits_{b=0}^{q-1}\frac{m_bj_b}{S_b}\right)\f] +for all indices `n_0,...,n_{k-1}`, `m_0,...,m_{q-1}`. + +Finally, the result of the inverse discrete complex-to-real Fourier transform is a real part of the tensor `Y`. + +Calculations for the generic case of axes and signal sizes are similar. + +**Example**: + +There is no `signal_size` input (4D input tensor): +```xml + + + + 1 + 161 + 161 + 2 + + + 2 + + + + 1 + 161 + 320 + + + +``` + +There is no `signal_size` input (3D input tensor): +```xml + + + + 161 + 161 + 2 + + + 2 + + + + 161 + 320 + + + +``` + + +There is `signal_size` input (4D input tensor): +```xml + + + + 1 + 161 + 161 + 2 + + + 2 + + + 2 + + + + 1 + 512 + 100 + + + +``` + + +There is `signal_size` input (3D input tensor): +```xml + + + + 161 + 161 + 2 + + + 2 + + + 2 + + + + 512 + 100 + + + +``` + + +There is `signal_size` input (5D input tensor, `-1` in `signal_size`, unsorted axes): +```xml + + + + 16 + 768 + 580 + 320 + 2 + + + 3 + + + 3 + + + + 16 + 768 + 1024 + 170 + + + +``` + + +There is `signal_size` input (5D input tensor, `-1` in `signal_size`, unsorted axes, the second example): +```xml + + + + 16 + 768 + 580 + 320 + 2 + + + 3 + + + 3 + + + + 16 + 768 + 2056 + 258 + + + +``` diff --git a/docs/ops/signals/RDFT_9.md b/docs/ops/signals/RDFT_9.md new file mode 100644 index 00000000000..75989ef85dd --- /dev/null +++ b/docs/ops/signals/RDFT_9.md @@ -0,0 +1,210 @@ +# Discrete Fourier Transformation for real-valued input (RDFT) {#openvino_docs_ops_signals_RDFT_9} + +**Versioned name**: *RDFT-9* + +**Category**: *Signal processing* + +**Short description**: *RDFT* operation performs the discrete real-to-complex Fourier transformation of the input tensor 
by specified dimensions. + +**Attributes**: + + No attributes available. + +**Inputs** + +* **1**: `data` - Input tensor of type *T* with data for the RDFT transformation. **Required.** +* **2**: `axes` - 1D tensor of type *T_IND* specifying dimension indices where RDFT is applied, and `axes` is any unordered list of indices of different dimensions of the input tensor, for example, `[0, 4]`, `[4, 0]`, `[4, 2, 1]`, `[1, 2, 3]`, `[-3, 0, -2]`. These indices should be integers from `-r` to `r - 1` inclusively, where `r = rank(data)`. A negative axis `a` is interpreted as an axis `r + a`. Other dimensions do not change. The order of elements in `axes` attribute matters, and is mapped directly to elements in the third input `signal_size`. **Required.** +* **3**: `signal_size` - 1D tensor of type *T_SIZE* describing signal size with respect to axes from the input `axes`. If `signal_size[i] == -1`, then RDFT is calculated for full size of the axis `axes[i]`. If `signal_size[i] > data_shape[axes[i]]`, then input data is zero-padded with respect to the axis `axes[i]` at the end. Finally, if `signal_size[i] < data_shape[axes[i]]`, then input data is trimmed with respect to the axis `axes[i]`. More precisely, if `signal_size[i] < data_shape[axes[i]]`, the slice `0: signal_size[i]` of the axis `axes[i]` is considered. Optionally, with default value `[data_shape[a] for a in axes]`. +* **NOTE**: If the input `signal_size` is specified, the size of `signal_size` must be the same as the size of `axes`. + +**Outputs** + +* **1**: Resulting tensor with elements of the same type as input `data` tensor and with rank `r + 1`, where `r = rank(data)`. The shape of the output has the form `[S_0, S_1, ..., S_{r-1}, 2]`, where all `S_a` are calculated as follows: + +1. Calculate `normalized_axes`, where each `normalized_axes[i] = axes[i]`, if `axes[i] >= 0`, and `normalized_axes[i] = axes[i] + r` otherwise. + +2. If `a not in normalized_axes`, then `S_a = data_shape[a]`. + +3. 
If `a in normalized_axes`, then `a = normalized_axes[i]` for some `i`. + + When `i != len(normalized_axes) - 1`, `S_a` is calculated as `S_a = data_shape[a]` if the `signal_size` input is not specified, or, if it is specified, `signal_size[i] = -1`; and `S_a = signal_size[i]` otherwise. + + When `i = len(normalized_axes) - 1`, `S_a` is calculated as `S_a = data_shape[a] // 2 + 1` if the `signal_size` input is not specified, or, if it is specified, `signal_size[i] = -1`; and `S_a = signal_size[i] // 2 + 1` otherwise. + +**Types** + +* *T*: any supported floating-point type. + +* *T_IND*: `int64` or `int32`. + +* *T_SIZE*: `int64` or `int32`. + +**Detailed description**: *RDFT* performs the discrete Fourier transformation of real-valued input tensor with respect to specified axes. Calculations are performed according to the following rules. + +For simplicity, assume that an input tensor `A` has the shape `[B_0, ..., B_{k-1}, M_0, ..., M_{q-1}]`, `axes=[k,...,k+q-1]`, and `signal_size=[S_0,...,S_{q-1}]`. + +Let `D` be the input tensor `A`, taking into account the `signal_size`, and, hence, `D` has the shape `[B_0, ..., B_{k-1}, S_0, ..., S_{q-1}]`. + +Next, let +\f[X=X[j_0,\dots,j_{k-1},j_k,\dots,j_{k+q-1}]\f] +for all indices `j_0,...,j_{k+q-1}`, be a real-valued input tensor. + +Then the transformation RDFT of the tensor `X` is the tensor `Y` of the shape `[B_0, ..., B_{k-1}, S_0, ..., S_{q-2}, S_{q-1} // 2 + 1]`, such that +\f[Y[n_0,\dots,n_{k-1},m_0,\dots,m_{q-1}]=\sum\limits_{j_0=0}^{S_0-1}\cdots\sum\limits_{j_{q-1}=0}^{S_{q-1}-1}X[n_0,\dots,n_{k-1},j_0,\dots,j_{q-1}]\exp\left(-2\pi i\sum\limits_{b=0}^{q-1}\frac{m_bj_b}{S_b}\right)\f] +for all indices `n_0,...,n_{k-1}`, `m_0,...,m_{q-1}`. + +Calculations for the generic case of axes and signal sizes are similar. 
+ +**Example**: + +There is no `signal_size` input (3D input tensor): +```xml + + + + 1 + 320 + 320 + + + 2 + + + + 1 + 320 + 161 + 2 + + + +``` + +There is no `signal_size` input (2D input tensor): +```xml + + + + 320 + 320 + + + 2 + + + + 320 + 161 + 2 + + + +``` + + +There is `signal_size` input (3D input tensor): +```xml + + + + 1 + 320 + 320 + + + 2 + + + 2 + + + + 1 + 512 + 51 + 2 + + + +``` + + +There is `signal_size` input (2D input tensor): +```xml + + + + 320 + 320 + + + 2 + + + 2 + + + + 512 + 51 + 2 + + + +``` + + +There is `signal_size` input (4D input tensor, `-1` in `signal_size`, unsorted axes): +```xml + + + + 16 + 768 + 580 + 320 + + + 3 + + + 3 + + + + 16 + 768 + 513 + 170 + 2 + + + +``` + + +There is `signal_size` input (4D input tensor, `-1` in `signal_size`, unsorted axes, the second example): +```xml + + + + 16 + 768 + 580 + 320 + + + 3 + + + 3 + + + + 16 + 768 + 1029 + 258 + 2 + + + +``` diff --git a/docs/optimization_guide/dldt_deployment_optimization_common.md b/docs/optimization_guide/dldt_deployment_optimization_common.md new file mode 100644 index 00000000000..5844230245d --- /dev/null +++ b/docs/optimization_guide/dldt_deployment_optimization_common.md @@ -0,0 +1,51 @@ +# General Optimizations {#openvino_docs_deployment_optimization_guide_common} + +## Inputs Pre-processing with OpenVINO + +In many cases, a network expects a pre-processed image, so make sure you do not perform unnecessary steps in your code: +- Model Optimizer can efficiently bake the mean and normalization (scale) values into the model (for example, to the weights of the first convolution). Please see [relevant Model Optimizer command-line options](../MO_DG/prepare_model/Additional_Optimizations.md). +- Let the OpenVINO accelerate other means of [Image Pre-processing and Conversion](../OV_Runtime_UG/preprocessing_overview.md). 
+- Note that in many cases, you can directly share the (input) data with the OpenVINO, for example consider [remote tensors API of the GPU Plugin](../OV_Runtime_UG//supported_plugins/GPU_RemoteTensor_API.md). + +## Prefer OpenVINO Async API +The API of the inference requests offers Sync and Async execution. While the `ov::InferRequest::infer()` is inherently synchronous and executes immediately (effectively serializing the execution flow in the current application thread), the Async "splits" the `infer()` into `ov::InferRequest::start_async()` and `ov::InferRequest::wait()`. Please consider the [API examples](../OV_Runtime_UG/ov_infer_request.md). + +A typical use-case for the `ov::InferRequest::infer()` is running a dedicated application thread per source of inputs (e.g. a camera), so that every step (frame capture, processing, results parsing and associated logic) is kept serial within the thread. +In contrast, the `ov::InferRequest::start_async()` and `ov::InferRequest::wait()` allow the application to continue its activities and poll or wait for the inference completion when really needed. So one reason for using asynchronous code is _efficiency_. + +**NOTE**: Although the Synchronous API can be somewhat easier to start with, in the production code always prefer to use the Asynchronous (callbacks-based, below) API, as it is the most general and scalable way to implement the flow control for any possible number of requests (and hence both latency and throughput scenarios). + +Let's see how the OpenVINO Async API can improve overall throughput rate of the application. The key advantage of the Async approach is as follows: while a device is busy with the inference, the application can do other things in parallel (e.g. populating inputs or scheduling other requests) rather than wait for the inference to complete. + +In the example below, inference is applied to the results of the video decoding. 
So it is possible to keep two parallel infer requests, and while the current is processed, the input frame for the next is being captured. This essentially hides the latency of capturing, so that the overall frame rate is rather determined only by the slowest part of the pipeline (decoding IR inference) and not by the sum of the stages. + +You can compare the pseudo-codes for the regular and async-based approaches: + +- In the regular way, the frame is captured with OpenCV and then immediately processed:
+ +@snippet snippets/dldt_optimization_guide8.cpp part8 + +![Intel® VTune™ screenshot](../img/vtune_regular.png) + +- In the "true" async mode, the `NEXT` request is populated in the main (application) thread, while the `CURRENT` request is processed:
+ +@snippet snippets/dldt_optimization_guide9.cpp part9 + +![Intel® VTune™ screenshot](../img/vtune_async.png) + +The technique can be generalized to any available parallel slack. For example, you can do inference and simultaneously encode the resulting or previous frames or run further inference, like emotion detection on top of the face detection results. +Refer to the [Object Detection C++ Demo](@ref omz_demos_object_detection_demo_cpp), [Object Detection Python Demo](@ref omz_demos_object_detection_demo_python) (latency-oriented Async API showcase) and [Benchmark App Sample](../../samples/cpp/benchmark_app/README.md) for complete examples of the Async API in action. + +### Notes on Callbacks +Notice that the Async's `ov::InferRequest::wait()` waits for the specific request only. However, running multiple inference requests in parallel provides no guarantees on the completion order. This may complicate a possible logic based on the `ov::InferRequest::wait`. The most scalable approach is using callbacks (set via the `ov::InferRequest::set_callback`) that are executed upon completion of the request. The callback functions will be used by the OpenVINO runtime to notify on the results (or errors). +This is a more event-driven approach. + +A few important points on the callbacks: +- It is the application's responsibility to ensure that any callback function is thread-safe +- Although executed asynchronously by dedicated threads, the callbacks should NOT include heavy operations (e.g. I/O) and/or blocking calls. Keep the work done by any callback to a minimum. + +## "get_tensor" Idiom + +`get_tensor` is a recommended way to populate the inference inputs (and read back the outputs), as it internally allocates the data with the right padding/alignment for the device. For example, the GPU inputs/outputs tensors are mapped to the host (which is fast) only when the `get_tensor` is used, while for the `set_tensor` a copy into the internal GPU structures may happen. 
+Please consider the [API examples](../OV_Runtime_UG/ov_infer_request.md). +In contrast, the `set_tensor` is a preferable way to handle remote tensors, [for example with the GPU device](../OV_Runtime_UG//supported_plugins/GPU_RemoteTensor_API.md). diff --git a/docs/optimization_guide/dldt_deployment_optimization_guide.md b/docs/optimization_guide/dldt_deployment_optimization_guide.md index 4293b3b4140..fe13deb6801 100644 --- a/docs/optimization_guide/dldt_deployment_optimization_guide.md +++ b/docs/optimization_guide/dldt_deployment_optimization_guide.md @@ -1,303 +1,44 @@ -# Deployment Optimization Guide {#openvino_docs_deployment_optimization_guide_dldt_optimization_guide} +# Runtime Inference Optimizations {#openvino_docs_deployment_optimization_guide_dldt_optimization_guide} @sphinxdirective .. toctree:: :maxdepth: 1 :hidden: - - openvino_docs_deployment_optimization_guide_dldt_optimization_guide_additional + + openvino_docs_deployment_optimization_guide_common + openvino_docs_deployment_optimization_guide_latency + openvino_docs_deployment_optimization_guide_tput + openvino_docs_deployment_optimization_guide_hints @endsphinxdirective -To optimize your performance results during runtime step it is possible to experiment with: +## Deployment Optimizations Overview {#openvino_docs_deployment_optimization_guide_overview} +Runtime or deployment optimizations focus is tuning of the inference parameters (e.g. optimal number of the requests executed simultaneously) and other means of how a model is _executed_. -* Preprocess +Here, possible optimization should start with defining the use-case. For example, whether the target scenario emphasizes throughput over latency like processing millions of samples by overnight jobs in the data centers. +In contrast, real-time usages would likely trade off the throughput to deliver the results at minimal latency. 
+Often this is a combined scenario that targets highest possible throughput while maintaining a specific latency threshold. -* Throughput mode +Each of the [OpenVINO supported devices](../OV_Runtime_UG/supported_plugins/Device_Plugins.md) offers low-level performance configuration. This allows to leverage the optimal model performance on the _specific_ device, but may require careful re-tuning when the model or device has changed. +**If the performance portability is of concern, consider using the [OpenVINO High-Level Performance Hints](../OV_Runtime_UG/performance_hints.md) first.** -* Async API +Finally, how the full-stack application uses the inference component _end-to-end_ is important. +For example, what are the stages that needs to be orchestrated? In some cases a significant part of the workload time is spent on bringing and preparing the input data. As detailed in the section on the [general optimizations](./dldt_deployment_optimization_common.md), the inputs population can be performed asynchronously to the inference. Also, in many cases the (image) [pre-processing can be offloaded to the OpenVINO](../OV_Runtime_UG/preprocessing_overview.md). For variably-sized inputs, consider [dynamic shapes](../OV_Runtime_UG/ov_dynamic_shapes.md) to efficiently connect the data input pipeline and the model inference. +These are common performance tricks that help both latency and throughput scenarios. -* Lowering inference precision + Similarly, the _model-level_ optimizations like [quantization that unlocks the int8 inference](../OV_Runtime_UG/Int8Inference.md) are general and help any scenario. As referenced in the [performance introduction topic](./dldt_optimization_guide.md), these are covered in the [dedicated document](./model_optimization_guide.md). Additionally, the `ov::hint::inference_precision` allows the devices to trade the accuracy for the performance at the _runtime_ (e.g. 
by allowing the fp16/bf16 execution for the layers that remain in fp32 after quantization of the original fp32 model). + +Further documents cover the _runtime_ performance optimizations topics. Please also consider [matrix support of the features by the individual devices](../OV_Runtime_UG/supported_plugins/Device_Plugins.md). -* Device optimization +[General, application-level optimizations](./dldt_deployment_optimization_common.md): + +* Inputs Pre-processing with the OpenVINO -* Combination of devices +* Async API and 'get_tensor' Idiom -## Preprocess - -### Letting the Inference Engine Accelerate Image Pre-processing/Conversion - -In many cases, a network expects a pre-processed image, so make sure you do not perform unnecessary steps in your code: -- Model Optimizer can efficiently bake the mean and normalization (scale) values into the model (for example, weights of the first convolution). See Model Optimizer Knobs Related to Performance. -- If regular 8-bit per channel images are your native media (for instance, decoded frames), do not convert to the `FP32` on your side, as this is something that plugins can accelerate. Use the `InferenceEngine::Precision::U8` as your input format:
- -@snippet snippets/dldt_optimization_guide1.cpp part1 - -Note that in many cases, you can directly share the (input) data with the Inference Engine. - -## Throughput Mode - -One way to increase computational efficiency is batching, which combines many (potentially tens) of input images to achieve optimal throughput. Internally, the execution resources are split/pinned into execution *streams*. Using this feature gains much better performance for the networks that originally are not scaled well with a number of threads (for example, lightweight topologies). This is especially pronounced for the many-core server machines. - -![](../img/THROUGHPUT.svg) - -Run the Benchmark App and play with number of infer requests running in parallel, next section. Try different values of the -nstreams argument from 1 to a number of CPU cores and find one that provides the best performance. - -The throughput mode relaxes the requirement to saturate the CPU by using a large batch: running multiple independent inference requests in parallel often gives much better performance, than using a batch only. This allows you to simplify the app-logic, as you don't need to combine multiple inputs into a batch to achieve good CPU performance. Instead, it is possible to keep a separate infer request per camera or another source of input and process the requests in parallel using Async API. - -## Inference Engine Async API - -Inference Engine Async API can improve overall frame rate of the application. While accelerator is busy with the inference, the application can continue doing things on the host rather than wait for the inference to complete. - -In the example below, inference is applied to the results of the video decoding. So it is possible to keep two parallel infer requests, and while the current is processed, the input frame for the next is being captured. 
This essentially hides the latency of capturing, so that the overall frame rate is rather determined only by the slowest part of the pipeline (decoding IR inference) and not by the sum of the stages. - -You can compare the pseudo-codes for the regular and async-based approaches: - -- In the regular way, the frame is captured with OpenCV and then immediately processed:
- -@snippet snippets/dldt_optimization_guide8.cpp part8 - -![Intel® VTune™ screenshot](../img/vtune_regular.png) - -- In the "true" async mode, the `NEXT` request is populated in the main (application) thread, while the `CURRENT` request is processed:
- -@snippet snippets/dldt_optimization_guide9.cpp part9 - -![Intel® VTune™ screenshot](../img/vtune_async.png) - -The technique can be generalized to any available parallel slack. For example, you can do inference and simultaneously encode the resulting or previous frames or run further inference, like emotion detection on top of the face detection results. - -There are important performance caveats though: for example, the tasks that run in parallel should try to avoid oversubscribing the shared compute resources. If the inference is performed on the HDDL and the CPU is essentially idle, it makes sense to do things on the CPU in parallel. However, multiple infer requests can oversubscribe that. Notice that heterogeneous execution can implicitly use the CPU, refer to Heterogeneity. - -Also, if the inference is performed on the graphics processing unit (GPU), it can take little gain to do the encoding, for instance, of the resulting video, on the same GPU in parallel, because the device is already busy. - -Refer to the [Object Detection С++ Demo](@ref omz_demos_object_detection_demo_cpp), [Object Detection Python Demo](@ref omz_demos_object_detection_demo_python)(latency-oriented Async API showcase) and [Benchmark App Sample](../../samples/cpp/benchmark_app/README.md) (which has both latency and throughput-oriented modes) for complete examples of the Async API in action. - -### Request-Based API and “GetBlob” Idiom - -Infer Request based API offers two types of request: Sync and Async. The Sync is considered below. The Async splits (synchronous) `Infer` into `StartAsync` and `Wait` (see Inference Engine Async API). - -More importantly, an infer request encapsulates the reference to the “executable” network and actual inputs/outputs. Now, when you load the network to the plugin, you get a reference to the executable network (you may consider that as a queue). 
Actual infer requests are created by the executable network: - -```sh - -@snippet snippets/dldt_optimization_guide6.cpp part6 -``` - -`GetBlob` is a recommend way to communicate with the network, as it internally allocates the data with right padding/alignment for the device. For example, the GPU inputs/outputs blobs are mapped to the host (which is fast) if the `GetBlob` is used. But if you called the `SetBlob`, the copy (from/to the blob you have set) into the internal GPU plugin structures will happen. - -### Performance Aspects of Running Multiple Requests Simultaneously - -If your application simultaneously executes multiple infer requests: - -- For the CPU, the best solution, you can use the CPU "throughput" mode. -- If latency is of more concern, you can try the `EXCLUSIVE_ASYNC_REQUESTS` [configuration option](../OV_Runtime_UG/supported_plugins/CPU.md) that limits the number of the simultaneously executed requests for all (executable) networks that share the specific device to just one: - -@snippet snippets/dldt_optimization_guide7.cpp part7 - -For more information on the executable networks notation, see Request-Based API and “GetBlob” Idiom. - -- The heterogeneous device uses the `EXCLUSIVE_ASYNC_REQUESTS` by default. - -- `KEY_EXCLUSIVE_ASYNC_REQUESTS` option affects only device queues of the individual application. - -- For GPU, the actual work is serialized by a plugin and/or a driver anyway. - -- Finally, for any VPU flavor, using multiple requests is a must for achieving good throughput. - -In the Inference Engine, there is no notion of requests priorities. It is left to the user side (for example, not queuing the low priority infer request, until another higher priority is waiting). Notice that it would require additional logic to synchronize between executable networks (queues) in your application code. - -## Automatic Lowering of the Inference Precision - -Inference precision directly affects the performance. 
- -Model Optimizer can produce an IR with different precision. For example, an FP16 IR initially targets VPU and GPU devices, while, for example, for the CPU, an FP16 IR is typically up-scaled to the regular FP32 automatically upon loading. But notice that further device-specific inference precision settings are available, -for example, [8-bit integer](../OV_Runtime_UG/Int8Inference.md) or [bfloat16](../OV_Runtime_UG/Bfloat16Inference.md), which is specific to the CPU inference, below. -Note that for the [MULTI device](../OV_Runtime_UG/supported_plugins/MULTI.md) plugin that supports automatic inference on multiple devices in parallel, you can use an FP16 IR (no need for FP32). -You can find more information, including preferred data types for specific devices, in the -[Supported Devices](../OV_Runtime_UG/supported_plugins/Supported_Devices.md) document. - - -By default, plugins enable the optimizations that allow lower precision if the acceptable range of accuracy is preserved. -For example, for the CPU that supports the AVX512_BF16 instructions, an FP16/FP32 model is converted to a [bfloat16](../OV_Runtime_UG/Bfloat16Inference.md) IR to accelerate inference. - -To compare the associated speedup, run the example command below to disable this feature on the CPU device with the AVX512_BF16 support and get regular FP32 execution: - -```sh -$ benchmark_app -m -enforcebf16=false - ``` - -Notice that for quantized (e.g. INT8) models the bfloat16 calculations (of the layers that remain in FP32) is disabled by default. -Refer to the [CPU Plugin documentation](../OV_Runtime_UG/supported_plugins/CPU.md) for more details. - -Similarly, the GPU device automatically executes FP16 for the layers that remain in FP16 in the quantized models (assuming that the FP16 model was quantized). -Refer to the ENABLE_FP16_FOR_QUANTIZED_MODELS key in the [GPU Plugin documentation](../OV_Runtime_UG/supported_plugins/GPU.md). 
- -## Device Optimizations - -The Inference Engine supports several target devices (CPU, GPU, Intel® Movidius™ Myriad™ 2 VPU, Intel® Movidius™ Myriad™ X VPU, Intel® Vision Accelerator Design with Intel® Movidius™ Vision Processing Units (VPU)), and each of them has a corresponding plugin. If you want to optimize a specific device, you must keep in mind the following tips to increase the performance. - -### CPU Checklist - -CPU plugin completely relies on the Intel® Math Kernel Library for Deep Neural Networks (Intel® MKL-DNN) for major primitives acceleration, for example, Convolutions or FullyConnected. - -The only hint you can get from that is how the major primitives are accelerated (and you cannot change this). For example, on the Core machines, you should see variations of the `jit_avx2` when inspecting the internal inference performance counters (and additional '_int8' postfix for [int8 inference](../OV_Runtime_UG/Int8Inference.md)). If you are an advanced user, you can further trace the CPU execution with (see Intel® VTune™). - -Internally, the Inference Engine has a threading abstraction level, which allows for compiling the [open source version](https://github.com/opencv/dldt) with either Intel® Threading Building Blocks (Intel® TBB) which is now default, or OpenMP* as an alternative parallelism solution. When using inference on the CPU, this is particularly important to align threading model with the rest of your application (and any third-party libraries that you use) to avoid oversubscription. For more information, see Note on the App-Level Threading section. - - Since R1 2019, the OpenVINO™ toolkit comes pre-compiled with Intel TBB, - so any OpenMP* API or environment settings (like `OMP_NUM_THREADS`) has no effect. - Certain tweaks (like number of threads used for inference on the CPU) are still possible via [CPU configuration options](../OV_Runtime_UG/supported_plugins/CPU.md). 
- Finally, the OpenVINO CPU inference is NUMA-aware, please refer to the Tips for inference on NUMA systems section. - -Other general recommendations: -- Usually, batching improves CPU performance. However, the need to gather frames in the batch might complicate the application logic. Instead, you can keep a separate infer request per camera or other source of input and process the requests in parallel. For more information, see the next section. -- If your application simultaneously performs inference of multiple models on the same CPU, make sure you do not oversubscribe the machine. See Performance Aspects of Running Multiple Requests Simultaneously for more information. -- Notice that the heterogeneous execution might implicitly load the CPU. For details, refer to the Heterogeneity section. -- Consider [8-bit integer inference on the CPU](../OV_Runtime_UG/Int8Inference.md). - -#### Throughput Mode for CPU -Unlike most accelerators, CPU is perceived as an inherently latency-oriented device. -In fact, the OpenVINO does support the "throughput" mode for the CPU, which allows the Inference Engine to efficiently run multiple inference requests on the CPU simultaneously, greatly improving the overall throughput. - -Internally, the execution resources are split/pinned into execution "streams". -This feature usually provides much better performance for the networks than batching. This is especially true for the many-core server machines: -![](../img/cpu_streams_explained_1.png) - -Compared with the batching, the parallelism is somewhat transposed (i.e. performed over inputs, and much less within CNN ops): -![](../img/cpu_streams_explained.png) - -Try the [Benchmark App](../../samples/cpp/benchmark_app/README.md) sample and play with number of streams running in parallel. The rule of thumb is tying up to a number of CPU cores on your machine. 
-For example, on an 8-core CPU, compare the `-nstreams 1` (which is a legacy, latency-oriented scenario) to the 2, 4, and 8 streams. - -In addition, you can play with the batch size to find the throughput sweet spot. - -If your application is hard or impossible to change in accordance with the multiple-requests logic, consider the "multiple-instance" trick to improve the throughput: -- For multi-socket execution, it is recommended to set [`KEY_CPU_THREADS_NUM`](../OV_Runtime_UG/supported_plugins/CPU.md) to the number of cores per socket, and run as many instances of the application as you have sockets. -- Similarly, for extremely lightweight networks (running faster than 1ms) and/or many-core machines (16+ cores), try limiting the number of CPU inference threads to just `#‍phys` cores and further, while trying to saturate the machine with running multiple instances of the application. - -### GPU Checklist - -Inference Engine relies on the [Compute Library for Deep Neural Networks (clDNN)](https://01.org/cldnn) for Convolutional Neural Networks acceleration on Intel® GPUs. Internally, clDNN uses OpenCL™ to implement the kernels. Thus, many general tips apply: - -- Prefer `FP16` over `FP32`, as the Model Optimizer can generate both variants and the `FP32` is default. -- Try to group individual infer jobs by using batches. -- Notice that using the GPU introduces one-time overhead (order of few seconds) of compiling the OpenCL kernels. The compilation happens upon loading the network to the GPU plugin and does not affect the inference time. -- If your application is simultaneously using the inference on the CPU or otherwise loads the host heavily, make sure that the OpenCL driver threads do not starve. You can use [CPU configuration options](../OV_Runtime_UG/supported_plugins/CPU.md) to limit number of inference threads for the CPU plugin. -- In the GPU-only scenario, a GPU driver might occupy a CPU core with spin-looped polling for completion. 
If the _CPU_ utilization is a concern, consider the `KEY_CLDND_PLUGIN_THROTTLE` configuration option. - -> **NOTE**: See the [Benchmark App Sample](../../samples/cpp/benchmark_app/README.md) code for a usage example. -Notice that while disabling the polling, this option might reduce the GPU performance, so usually this option is used with multiple [GPU streams](../OV_Runtime_UG/supported_plugins/GPU.md). - - -### Intel® Movidius™ Myriad™ X Visual Processing Unit and Intel® Vision Accelerator Design with Intel® Movidius™ VPUs - -Since Intel® Movidius™ Myriad™ X Visual Processing Unit (Intel® Movidius™ Myriad™ 2 VPU) communicates with the host over USB, minimum four infer requests in flight are recommended to hide the data transfer costs. See Request-Based API and “GetBlob” Idiom and [Benchmark App Sample](../../samples/cpp/benchmark_app/README.md) for more information. - -Intel® Vision Accelerator Design with Intel® Movidius™ VPUs requires to keep at least 32 inference requests in flight to fully saturate the device. - -## Heterogeneity - -Heterogeneous execution (constituted by the dedicated Inference Engine [“Hetero” plugin](../OV_Runtime_UG/supported_plugins/HETERO.md)) enables to schedule a network inference to the multiple devices. - -### Typical Heterogeneous Scenarios of Concern - -The primary points for executing a network in heterogeneous mode are as follows: - -- Calculate the heaviest pieces of the network with an accelerator while falling back to the CPU for the layers that are not supported by the accelerator.
- This is particularly useful when certain custom (user) kernels are implemented only for the CPU (and much harder or even impossible to implement for the accelerator). - -- Use all available compute devices more efficiently, for example, by running branches of the network on the different devices. - -### Heterogeneous Flow - -The execution through heterogeneous plugin has three distinct steps: - -1. **Applying affinity setting for the layers**, that is, binding them to the devices. - - - This can be done automatically using *fallback priorities*, or on the *per-layer* basis. - - - The affinity setting is made before loading the network to the (heterogeneous) plugin, so this is always a **static** setup with respect to execution. - -2. **Loading a network to the heterogeneous plugin**, which internally splits the network into subgraphs.
- You can check the decisions the plugin makes, see Analysing the Heterogeneous Execution. - -3. **Executing the infer requests**. From user’s side, this looks identical to a single-device case, while internally, the subgraphs are executed by actual plugins/devices. - -Performance benefits of the heterogeneous execution depend heavily on the communications granularity between devices. If transmitting/converting data from one part device to another takes more time than the execution, the heterogeneous approach makes little or no sense. Using Intel® VTune™ helps to visualize the execution flow on a timeline (see Intel® VTune™ Examples). - -Similarly, if there are too much subgraphs, the synchronization and data transfers might eat the entire performance. In some cases, you can define the (coarser) affinity manually to avoid sending data back and forth many times during one inference. - -The general affinity “rule of thumb” is to keep computationally-intensive kernels on the accelerator, and "glue" or helper kernels on the CPU. Notice that this includes the granularity considerations. For example, running some custom activation (that comes after every accelerator-equipped convolution) on the CPU might result in performance degradation due to too much data type and/or layout conversions, even though the activation itself can be extremely fast. In this case, it might make sense to consider implementing the kernel for the accelerator (see Optimizing Custom Kernels). The conversions typically manifest themselves as outstanding (comparing to CPU-only execution) 'Reorder' entries (see Internal Inference Performance Counters). - -For general details on the heterogeneous plugin, refer to the [corresponding section in the Inference Engine Developer Guide](../OV_Runtime_UG/supported_plugins/HETERO.md). - -### Trying the Heterogeneous Plugin with Inference Engine Samples - -Every Inference Engine sample supports the `-d` (device) option. 
- -For example, here is a command to run an [Classification Sample Async](../../samples/cpp/classification_sample_async/README.md): - -```sh -./classification_sample_async -m /Model.xml -i /picture.jpg -d HETERO:GPU,CPU -``` - -where: - -- `HETERO` stands for Heterogeneous plugin. -- `GPU,CPU` points to fallback policy with first priority on GPU and further fallback to CPU. - -You can point more than two devices: `-d HETERO:HDDL,GPU,CPU`. - -### General Tips on GPU/CPU Execution - -The following tips are provided to give general guidance on optimizing execution on GPU/CPU devices. - -- Generally, GPU performance is better on heavy kernels (like Convolutions) and large inputs. So if the network inference time is already too small (~1ms of execution time), using the GPU would unlikely give a boost. - -- A typical strategy to start with is to test the CPU-only and GPU-only scenarios first (with samples this is plain `-d CPU` or `-d GPU`). If there are specific kernels that are not supported by the GPU, the best option to try is the `HETERO:GPU,CPU` that automatically applies default splitting (based on the plugins layers support). Then, you can play with the manual affinity settings (for example, to further minimize the number of subgraphs). - -- The general affinity “rule of thumb” is to keep computationally-intensive kernels on the accelerator, and "glue" (or helper) kernels on the CPU. Notice that this includes the granularity considerations. For example, running some (custom) activation on the CPU would result in too many conversions. - -- It is advised to do performance analysis to determine “hotspot” kernels, which should be the first candidates for offloading. At the same time, it is often more efficient to offload some reasonably sized sequence of kernels, rather than individual kernels, to minimize scheduling and other run-time overheads. - -- Notice that GPU can be busy with other tasks (like rendering). 
Similarly, the CPU can be in charge for the general OS routines and other application threads (see Note on the App-Level Threading). Also, a high interrupt rate due to many subgraphs can raise the frequency of the one device and drag the frequency of another down. - -- Device performance can be affected by dynamic frequency scaling. For example, running long kernels on both devices simultaneously might eventually result in one or both devices stopping use of the Intel® Turbo Boost Technology. This might result in overall performance decrease, even comparing to single-device scenario. - -- Mixing the `FP16` (GPU) and `FP32` (CPU) execution results in conversions and, thus, performance issues. If you are seeing a lot of heavy outstanding (compared to the CPU-only execution) Reorders, consider implementing actual GPU kernels. Refer to Internal Inference Performance Counters for more information. - -### Analyzing Heterogeneous Execution - -There is a dedicated configuration option that enables dumping the visualization of the subgraphs created by the heterogeneous plugin, please see code example in the [HETERO plugin documentation](../OV_Runtime_UG/supported_plugins/HETERO.md) - -After enabling the configuration key, the heterogeneous plugin generates two files: - -- `hetero_affinity.dot` - per-layer affinities. This file is generated only if default fallback policy was executed (as otherwise you have set the affinities by yourself, so you know them). -- `hetero_subgraphs.dot` - affinities per sub-graph. This file is written to the disk during execution of `Core::LoadNetwork` for the heterogeneous flow. - -You can use GraphViz\* utility or `.dot` converters (for example, to `.png` or `.pdf`), like xdot\*, available on Linux\* OS with `sudo apt-get install xdot`. - -You can also use performance data (in the [Benchmark App](../../samples/cpp/benchmark_app/README.md), it is an option `-pc`) to get performance data on each subgraph. 
Again, refer to the [HETERO plugin documentation](../OV_Runtime_UG/supported_plugins/HETERO.md) and to Internal Inference Performance Counters for a general counters information. - -## Multi-Device Execution -OpenVINO™ toolkit supports automatic multi-device execution, please see [MULTI-Device plugin description](../OV_Runtime_UG/supported_plugins/MULTI.md). -In the next chapter you can find the device-specific tips, while this section covers few recommendations -for the multi-device execution: -- MULTI usually performs best when the fastest device is specified first in the list of the devices. - This is particularly important when the parallelism is not sufficient - (e.g. the number of request in the flight is not enough to saturate all devices). -- It is highly recommended to query the optimal number of inference requests directly from the instance of the ExecutionNetwork - (resulted from the LoadNetwork call with the specific multi-device configuration as a parameter). -Please refer to the code of the [Benchmark App](../../samples/cpp/benchmark_app/README.md) sample for details. -- Notice that for example CPU+GPU execution performs better with certain knobs - which you can find in the code of the same [Benchmark App](../../samples/cpp/benchmark_app/README.md) sample. - One specific example is disabling GPU driver polling, which in turn requires multiple GPU streams (which is already a default for the GPU) to amortize slower - inference completion from the device to the host. -- Multi-device logic always attempts to save on the (e.g. inputs) data copies between device-agnostic, user-facing inference requests - and device-specific 'worker' requests that are being actually scheduled behind the scene. - To facilitate the copy savings, it is recommended to start the requests in the order that they were created - (with ExecutableNetwork's CreateInferRequest). 
- -Refer to [Deployment Optimization Guide Additional Configurations](dldt_deployment_optimization_guide_additional.md) to read more about performance during deployment step and learn about threading, working with multi-socket CPUs and Basic Interoperability with Other APIs. +Use-case specific optimizations along with some implementation details: + +* Optimizing for [throughput](./dldt_deployment_optimization_tput.md) and [latency](./dldt_deployment_optimization_latency.md) + +* [OpenVINO's high-level performance hints](./dldt_deployment_optimization_hints.md) as the portable, future-proof approach for performance configuration diff --git a/docs/optimization_guide/dldt_deployment_optimization_guide_additional.md b/docs/optimization_guide/dldt_deployment_optimization_guide_additional.md deleted file mode 100644 index 9f685e57468..00000000000 --- a/docs/optimization_guide/dldt_deployment_optimization_guide_additional.md +++ /dev/null @@ -1,70 +0,0 @@ -# Deployment Optimization Guide Additional Configurations {#openvino_docs_deployment_optimization_guide_dldt_optimization_guide_additional} - -To optimize your performance results during runtime step, you can experiment with: - -* multi socket CPUs - -* threading - -* Basic Interoperability with Other APIs - - -## Best Latency on the Multi-Socket CPUs -Note that when latency is of concern, there are additional tips for multi-socket systems. -When input is limited to the single image, the only way to achieve the best latency is to limit execution to the single socket. -The reason is that single image is simply not enough -to saturate more than one socket. Also NUMA overheads might dominate the execution time. -Below is the example command line that limits the execution to the single socket using numactl for the best *latency* value -(assuming the machine with 28 phys cores per socket): -``` -limited to the single socket). 
-$ numactl -m 0 --physcpubind 0-27 benchmark_app -m -api sync -nthreads 28 - ``` -Note that if you have more than one input, running as many inference requests as you have NUMA nodes (or sockets) -usually gives the same best latency as a single request on the single socket, but much higher throughput. Assuming two NUMA nodes machine: -``` -$ benchmark_app -m -nstreams 2 - ``` -Number of NUMA nodes on the machine can be queried via 'lscpu'. -Please see more on the NUMA support in the [Optimization Guide](../OV_Runtime_UG/supported_plugins/MULTI.md). - - - ## Threading - - - As explained in the CPU Checklist section, by default the Inference Engine uses Intel TBB as a parallel engine. Thus, any OpenVINO-internal threading (including CPU inference) uses the same threads pool, provided by the TBB. But there are also other threads in your application, so oversubscription is possible at the application level: -- The rule of thumb is that you should try to have the overall number of active threads in your application equal to the number of cores in your machine. Keep in mind the spare core(s) that the OpenCL driver under the GPU plugin might also need. -- One specific workaround to limit the number of threads for the Inference Engine is using the [CPU configuration options](../OV_Runtime_UG/supported_plugins/CPU.md). -- To avoid further oversubscription, use the same threading model in all modules/libraries that your application uses. Notice that third party components might bring their own threading. For example, using Inference Engine which is now compiled with the TBB by default might lead to [performance troubles](https://www.threadingbuildingblocks.org/docs/help/reference/appendices/known_issues/interoperability.html) when mixed in the same app with another computationally-intensive library, but compiled with OpenMP. You can try to compile the [open source version](https://github.com/opencv/dldt) of the Inference Engine to use the OpenMP as well. 
But notice that in general, the TBB offers much better composability, than other threading solutions. -- If your code (or third party libraries) uses GNU OpenMP, the Intel® OpenMP (if you have recompiled Inference Engine with that) must be initialized first. This can be achieved by linking your application with the Intel OpenMP instead of GNU OpenMP, or using `LD_PRELOAD` on Linux* OS. - -## Basic Interoperability with Other APIs - -The general approach for sharing data between Inference Engine and media/graphics APIs like Intel® Media Server Studio (Intel® MSS) is based on sharing the *system* memory. That is, in your code, you should map or copy the data from the API to the CPU address space first. - -For Intel MSS, it is recommended to perform a viable pre-processing, for example, crop/resize, and then convert to RGB again with the [Video Processing Procedures (VPP)](https://software.intel.com/en-us/node/696108). Then lock the result and create an Inference Engine blob on top of that. The resulting pointer can be used for the `SetBlob`: - -@snippet snippets/dldt_optimization_guide2.cpp part2 - -**WARNING**: The `InferenceEngine::NHWC` layout is not supported natively by most InferenceEngine plugins so internal conversion might happen. - -@snippet snippets/dldt_optimization_guide3.cpp part3 - -Alternatively, you can use RGBP (planar RGB) output from Intel MSS. This allows to wrap the (locked) result as regular NCHW which is generally friendly for most plugins (unlike NHWC). Then you can use it with `SetBlob` just like in previous example: - -@snippet snippets/dldt_optimization_guide4.cpp part4 - -The only downside of this approach is that VPP conversion to RGBP is not hardware accelerated (and performed on the GPU EUs). Also, it is available only on LInux. 
- -## OpenCV* Interoperability Example - -Unlike APIs that use dedicated address space and/or special data layouts (for instance, compressed OpenGL* textures), regular OpenCV data objects like `cv::Mat` reside in the conventional system memory. That is, the memory can be actually shared with the Inference Engine and only data ownership to be transferred. - -Again, if the OpenCV and Inference Engine layouts match, the data can be wrapped as Inference Engine (input/output) blob. Notice that by default, Inference Engine accepts the **planar** and **not interleaved** inputs in NCHW, so the NHWC (which is exactly the interleaved layout) should be specified explicitly: - -**WARNING**: The `InferenceEngine::NHWC` layout is not supported natively by most InferenceEngine plugins so internal conversion might happen. - -@snippet snippets/dldt_optimization_guide5.cpp part5 - -Notice that original `cv::Mat`/blobs cannot be used simultaneously by the application and the Inference Engine. Alternatively, the data that the pointer references to can be copied to unlock the original data and return ownership to the original API. - -To learn more about optimizations during developing step, visit [Deployment Optimization Guide](dldt_deployment_optimization_guide.md) page. diff --git a/docs/optimization_guide/dldt_deployment_optimization_hints.md b/docs/optimization_guide/dldt_deployment_optimization_hints.md new file mode 100644 index 00000000000..c06cfc4caa2 --- /dev/null +++ b/docs/optimization_guide/dldt_deployment_optimization_hints.md @@ -0,0 +1,22 @@ +# High-level Performance Hints (Presets) {#openvino_docs_deployment_optimization_guide_hints} + +Traditionally, each of the OpenVINO's [supported devices](../OV_Runtime_UG/supported_plugins/Supported_Devices.md) offers a bunch of low-level performance settings. +Tweaking this detailed configuration requires deep architecture understanding. 
+Also, while the resulting performance may be optimal for the specific combination of the device and the model that is inferred, it is actually neither device/model nor future-proof: +- Even within a family of the devices (like various CPUs), things like different number of CPU cores would eventually result in different execution configuration to be optimal. +- Similarly the optimal batch size is very much specific to the particular instance of the GPU. +- Compute vs memory-bandwidth requirements for the model being inferenced, as well as inference precision, possible model's quantization and other factors add more unknowns to the resulting performance equation. +- Finally, the optimal execution parameters of one device do not transparently map to another device type, for example: + - Both the CPU and GPU devices support the notion of the 'streams' (i.e. inference instances that are executed in parallel, please see `ov::num_streams`), yet the optimal number of the streams is deduced very differently. + +Beyond execution _parameters_ there are potentially many device-specific details like _scheduling_ that greatly affect the performance. +Specifically, GPU-oriented tricks like batching, which combines many (potentially tens) of input images to achieve optimal throughput, do not always map well to the CPU, as e.g. detailed in the next sections. +The hints allow to really hide _execution_ specifics required to saturate the device. For example, no need to explicitly combine multiple inputs into a batch to achieve good GPU performance. +Instead, it is possible to keep a separate infer request per camera or another source of input and process the requests in parallel using OpenVINO Async API. + +The only requirement for the application to leverage the throughput is about **running multiple inference requests in parallel**. +OpenVINO's device-specific implementation of the hints will take care of the rest. This allows a developer to greatly simplify the app-logic. 
+ +In summary, when the performance _portability_ is of concern, consider the [High-Level Performance Hints](../OV_Runtime_UG/performance_hints.md). +Below you can find the implementation details (particularly how the OpenVINO implements the 'throughput' approach) for the specific devices. +Keep in mind that while different throughput-oriented scheduling approaches ([like the batching or other means of executing individual inference requests](./dldt_deployment_optimization_tput.md)) can work together, the hints make these decisions transparent to the application. \ No newline at end of file diff --git a/docs/optimization_guide/dldt_deployment_optimization_latency.md b/docs/optimization_guide/dldt_deployment_optimization_latency.md new file mode 100644 index 00000000000..cf75edc6bc1 --- /dev/null +++ b/docs/optimization_guide/dldt_deployment_optimization_latency.md @@ -0,0 +1,35 @@ +## Optimizing for the Latency {#openvino_docs_deployment_optimization_guide_latency} + +@sphinxdirective + +.. toctree:: + :maxdepth: 1 + :hidden: + + openvino_docs_IE_DG_Model_caching_overview + +@endsphinxdirective + +## Latency Specifics +A significant fraction of applications are focused on situations where typically a single model is loaded (and single input is used) at a time. +This is a regular "consumer" use case and a default (also for the legacy reasons) performance setup for any OpenVINO device. +Notice that an application can create more than one request if needed (for example to support asynchronous inputs population), the question is really about how many requests are being executed in parallel. + +Similarly, when multiple models are served on the same device, it is important whether the models are executed simultaneously, or in chain (for example in the inference pipeline). +As expected, the lowest latency is achieved with only one concurrent inference at a moment. Accordingly, any additional concurrency usually results in the latency growing fast. 
+ +However, for example, specific configurations, like multi-socket CPUs can deliver as many requests (at the same minimal latency) as there are NUMA nodes in the machine. +Thus, human expertise is required to get the most out of the device even in the latency case. Consider using [OpenVINO high-level performance hints](../OV_Runtime_UG/performance_hints.md) instead. + +**NOTE**: [OpenVINO performance hints](./dldt_deployment_optimization_hints.md) is a recommended way for performance configuration, which is both device-agnostic and future-proof. + +In the case when there are multiple models to be used simultaneously, consider using different devices for inferencing the different models. Finally, when multiple models are executed in parallel on the device, using additional `ov::hint::model_priority` may help to define relative priorities of the models (please refer to the documentation on the [matrix features support for OpenVINO devices](../OV_Runtime_UG/supported_plugins/Device_Plugins.md) to check for the support of the feature by the specific device). + +## First-Inference Latency and Model Load/Compile Time +There are cases when model loading/compilation are heavily contributing to the end-to-end latencies. +For example when the model is used exactly once, or when due to on-device memory limitations the model is unloaded (to free the memory for another inference) and reloaded at some cadence. + +Such a "first-inference latency" scenario however may pose an additional limitation on the model load/compilation time, as inference accelerators (other than the CPU) usually require a certain level of model compilation upon loading. +The [model caching](../OV_Runtime_UG/Model_caching_overview.md) is a way to amortize the loading/compilation time over multiple application runs. If the model caching is not possible (as e.g. it requires write permissions for the applications), the CPU device almost exclusively offers the fastest model load time. 
Also, consider using the [AUTO device](../OV_Runtime_UG/auto_device_selection.md). It allows transparently using the CPU for inference, while the actual accelerator loads the model (upon that, the inference hot-swapping also happens automatically). + +Finally, notice that any [throughput-oriented options](./dldt_deployment_optimization_tput.md) may increase the model load time significantly. diff --git a/docs/optimization_guide/dldt_deployment_optimization_tput.md b/docs/optimization_guide/dldt_deployment_optimization_tput.md new file mode 100644 index 00000000000..5fdfe20bc57 --- /dev/null +++ b/docs/optimization_guide/dldt_deployment_optimization_tput.md @@ -0,0 +1,68 @@ +# Optimizing for Throughput {#openvino_docs_deployment_optimization_guide_tput} + +## General Throughput Considerations +As described in the section on the [latency-specific considerations](./dldt_deployment_optimization_latency.md) one possible use-case is delivering every single request at the minimal delay. +Throughput, on the other hand, is about inference scenarios in which a potentially large number of inference requests are served simultaneously. +Here, the overall application throughput can be significantly improved with the right performance configuration. +Also, if the model is not already compute- or memory bandwidth-limited, the associated increase in latency is not linearly dependent on the number of requests executed in parallel. + +With OpenVINO there are two major means of running the multiple requests simultaneously: batching and "streams", explained in this document. +Yet, different GPUs behave differently with batch sizes, just like different CPUs require a different number of execution streams to maximize the throughput. +Predicting inference performance is difficult and finding optimal execution parameters requires direct experiments and measurements. 
+One possible throughput optimization strategy is to set an upper bound for latency and then increase the batch size or number of the streams until that tail latency is met (or the throughput is not growing anymore). +Also, consider [Deep Learning Workbench](https://docs.openvino.ai/latest/workbench_docs_Workbench_DG_Introduction.html). + +Finally, the [automatic multi-device execution](../OV_Runtime_UG/multi_device.md) helps to improve the throughput, please also see the section below. +While the same approach of optimizing the parameters of each device separately does work, the resulting multi-device performance is a fraction (that is different for different models) of the “ideal” (plain sum) performance. + +Overall, the latency-throughput is not linearly dependent and very _device_ specific. It is also tightly integrated with _model_ characteristics. +As for the possible inference devices the scenery had already become pretty diverse, the OpenVINO has introduced the dedicated notion of the high-level performance configuration "hints" to describe the target application scenarios. +The hints are described [here](./dldt_deployment_optimization_hints.md). + +**NOTE**: [OpenVINO performance hints](./dldt_deployment_optimization_hints.md) is a recommended way for performance configuration, which is both device-agnostic and future-proof. + +The rest of the document provides low-level details on the OpenVINO's low-level ways to optimize the throughput. + +## Low-Level Implementation Details +### OpenVINO Streams +As detailed in the section OpenVINO Async API running multiple inference requests asynchronously is important for general application efficiency. +Additionally, most devices support running multiple inference requests in parallel in order to improve the device utilization. The _level_ of the parallelism (i.e. how many requests are really executed in parallel on the device) is commonly referred as a number of 'streams'. 
Some devices run several requests per stream to amortize the host-side costs. +Notice that streams (that can be considered as independent queues) are really executing the requests in parallel, but not in lock step (as e.g. the batching does), which makes the streams much more compatible with [dynamically-shaped inputs](../OV_Runtime_UG/ov_dynamic_shapes.md) when individual requests can have different shapes. + +Also, notice that for efficient asynchronous execution, the streams are actually handling inference with a special pool of threads. +So each time you start inference requests (potentially from different application threads), they are actually muxed into an inference queue of the particular `ov::compiled_model`. +If there is a vacant stream, it pops the request from the queue and actually expedites that to the on-device execution. + +The usage of multiple streams is an inherently throughput-oriented approach, as every stream requires a dedicated memory to operate in parallel to the rest of the streams (read-only data like weights are usually shared between all streams). +Also, the streams inflate the load/compilation time. +This is why the [latency hint](./dldt_deployment_optimization_hints.md) governs a device to create a bare minimum of streams (usually just one). + +Finally, the streams are always preferable compared to creating multiple instances of the same model, as weights memory is shared across streams, reducing possible memory consumption. + +### Throughput on the CPU: Internals +In order to best serve multiple inference requests simultaneously, the inference threads are grouped/pinned to the particular CPU cores, constituting the CPU streams. +This provides much better performance for the networks than batching especially for the many-core machines: +![](../img/cpu_streams_explained_1.png) + +Compared with the batching, the parallelism is somewhat transposed (i.e. 
performed over inputs, with much less synchronization within CNN ops): +![](../img/cpu_streams_explained.png) + +Notice that [high-level performance hints](../OV_Runtime_UG/performance_hints.md) allows the implementation to select the optimal number of the streams, _depending on the model compute demands_ and CPU capabilities (including [int8 inference](../OV_Runtime_UG/Int8Inference.md) hardware acceleration, number of cores, etc). + +### Automatic Batching Internals +While the GPU plugin fully supports general notion of the streams, the associated performance (throughput) improvements are usually modest. +The primary reason is that, while the streams allow to hide the communication overheads and hide certain bubbles in device utilization, running multiple OpenCL kernels on the GPU simultaneously is less efficient, compared to calling a kernel on the multiple inputs at once. + +When the parallel slack is small (e.g. only 2-4 requests executed simultaneously), then using the streams for the GPU may suffice. Also streams are fully compatible with [dynamically-shaped inputs](../OV_Runtime_UG/ov_dynamic_shapes.md) when individual requests can have different shapes. +Typically, for 4 and more requests the batching delivers better throughput for the GPUs. Using the [High-Level Performance Hints](../OV_Runtime_UG/performance_hints.md) is the most portable and future-proof option, allowing the OpenVINO to find best combination of streams and batching for a given scenario. +As explained in the section on the [automatic batching](../OV_Runtime_UG/automatic_batching.md), the feature performs on-the-fly grouping of the inference requests to improve device utilization. +The Automatic Batching relaxes the requirement for an application to saturate devices like GPU by _explicitly_ using a large batch. 
It performs transparent inputs gathering from +individual inference requests followed by the actual batched execution, with no programming effort from the user: +![](../img/BATCH_device.PNG) + +Essentially, the Automatic Batching shifts the asynchronousity from the individual requests to the groups of requests that constitute the batches. Thus, for the execution to be efficient it is very important that the requests arrive timely, without causing a batching timeout. +Normally, the timeout should never be hit. It is rather a graceful way to handle the application exit (when the inputs are not arriving anymore, so the full batch is not possible to collect). + +So if your workload experiences the timeouts (resulting in the performance drop, as the timeout value adds itself to the latency of every request), consider balancing the timeout value vs the batch size. For example in many cases having smaller timeout value and batch size may yield better performance than large batch size, but coupled with the timeout value that cannot guarantee accommodating the full number of the required requests. + +Finally, following the "get_tensor idiom" section from the [general optimizations](./dldt_deployment_optimization_common.md) helps the Automatic Batching to save on inputs/outputs copies. Thus, in your application always prefer the "get" versions of the tensor data access APIs. 
diff --git a/docs/optimization_guide/dldt_optimization_guide.md b/docs/optimization_guide/dldt_optimization_guide.md index 7381545800e..a90f744ff2b 100644 --- a/docs/optimization_guide/dldt_optimization_guide.md +++ b/docs/optimization_guide/dldt_optimization_guide.md @@ -1,40 +1,36 @@ -# Performance Optimization Guide {#openvino_docs_optimization_guide_dldt_optimization_guide} +# Introduction to Performance Optimization {#openvino_docs_optimization_guide_dldt_optimization_guide} +Before exploring possible optimization techniques, let us first define what the inference performance is and how to measure that. +Notice that reported inference performance often tends to focus on the speed of execution. +In fact these are at least four connected factors of accuracy, throughput/latency and efficiency. The rest of the document discusses how to balance these key factors. -Before exploring optimization techniques, let us first define what performance is and how it is measured. - -## What Is Performance - -Performance means how fast the model is in deployment. Two key metrics are used to measure performance: latency and throughput. +## What Is Inference Performance +Generally, performance means how fast the model processes the live data. Two key metrics are used to measure the performance: latency and throughput are fundamentally interconnected. ![](../img/LATENCY_VS_THROUGHPUT.svg) -Latency measures inference time (ms) required to process a single input. When it comes to batch input need to measure throughput (images per second or frames per second, FPS). To calculate throughput, divide number of frames that were processed by the processing time. +Latency measures inference time (ms) required to process a single input. When it comes to the executing multiple inputs executed simultaneously (e.g. via batching) then the overall throughput (inferences per second, or frames per second, FPS, in the specific case of visual processing) is usually of more concern. 
+To calculate throughput, divide the number of frames that were processed by the processing time. + +It is important to separate the "pure" inference time of a neural network and the end-to-end application performance. For example data transfers between the host and a device may unintentionally affect the performance when a host input tensor is processed on the accelerator like dGPU. Similarly, the image-preprocessing may also contribute significantly to the inference time. As detailed in the [getting performance numbers](../MO_DG/prepare_model/Getting_performance_numbers.md) section, when drilling into _inference_ performance, one option is to measure all such items separately. +For the end-to-end scenario though, consider the image pre-processing through OpenVINO and the asynchronous execution as a way to amortize the communication costs like data transfers. You can find further details in the [general optimizations document](./dldt_deployment_optimization_common.md). + +"First-inference latency" is another specific case (e.g. when fast application start-up is required) where the resulting performance may be well dominated by the model loading time. Consider [model caching](../OV_Runtime_UG/Model_caching_overview.md) as a way to improve model loading/compilation time. + +Finally, memory footprint restrictions are another possible concern when designing an application. While this is a motivation for the _model_ optimization techniques referenced in the next section, notice that the throughput-oriented execution is usually much more memory-hungry, as detailed in the [Deployment Optimization Guide](../optimization_guide/dldt_deployment_optimization_guide.md). + > **NOTE**: To get performance numbers for OpenVINO, as well as tips how to measure it and compare with native framework, check [Getting performance numbers](../MO_DG/prepare_model/Getting_performance_numbers.md) page. 
-## How to Improve Performance +## Improving the Performance: Model vs Runtime Optimizations -> **NOTE**: Make sure that your model can be successfully inferred with OpenVINO Inference Engine. +> **NOTE**: Make sure that your model can be successfully inferred with OpenVINO Runtime. -Inside OpenVINO there are two ways how to get better performance number: during developing and deployment your model. **It is possible to combine both developing and deployment optimizations**. +With OpenVINO, there are two primary ways of improving the inference performance, namely model- and runtime-level optimizations. **These two optimization directions are fully compatible**. -- **Developing step** includes model modification. Inside developing optimization there are three ways to optimize your model: +- **Model optimization** includes model modification, such as quantization, pruning, optimization of preprocessing, etc. For more details, refer to this [document](./model_optimization_guide.md). - - **Post-training Optimization tool** (POT) is designed to optimize the inference of deep learning models by applying special methods without model retraining or fine-tuning, like post-training quantization. +- **Runtime (Deployment) optimization** includes tuning of model _execution_ parameters. To read more, visit [Deployment Optimization Guide](../optimization_guide/dldt_deployment_optimization_guide.md). - - **Neural Network Compression Framework (NNCF)** provides a suite of advanced algorithms for Neural Networks inference optimization with minimal accuracy drop, available quantization, pruning and sparsity optimization algorithms. 
- - - **Model Optimizer** implement some optimization to a model, most of them added by default, but you can configure mean/scale values, batch size RGB vs BGR input channels and other parameters to speed-up preprocess of a model ([Additional Optimization Use Cases](../MO_DG/prepare_model/Additional_Optimizations.md)) - -- **Deployment step** includes tuning inference parameters and optimizing model execution, to read more visit [Deployment Optimization Guide](../optimization_guide/dldt_deployment_optimization_guide.md). - -More detailed workflow: - -![](../img/DEVELOPMENT_FLOW_V3_crunch.svg) - -To understand when to use each development optimization tool, follow this diagram: - -POT is the easiest way to get optimized models and it is also really fast and usually takes several minutes depending on the model size and used HW. NNCF can be considered as an alternative or an addition when the first does not give accurate results. - -![](../img/WHAT_TO_USE.svg) +## Performance benchmarks +To estimate the performance and compare performance numbers, measured on various supported devices, a wide range of public models are available at [Performance benchmarks](../benchmarks/performance_benchmarks.md) section. \ No newline at end of file diff --git a/docs/optimization_guide/model_optimization_guide.md b/docs/optimization_guide/model_optimization_guide.md new file mode 100644 index 00000000000..50469ea5acb --- /dev/null +++ b/docs/optimization_guide/model_optimization_guide.md @@ -0,0 +1,35 @@ + # Model Optimization Guide {#openvino_docs_model_optimization_guide} + +@sphinxdirective + +.. toctree:: + :maxdepth: 1 + :hidden: + + pot_README + docs_nncf_introduction + openvino_docs_IE_DG_Int8Inference + +@endsphinxdirective + + Model optimization assumes applying transformations to the model and relevant data flow to improve the inference performance. These transformations are basically offline and can require the availability of training and validation data. 
It includes such methods as quantization, pruning, preprocessing optimization, etc. OpenVINO provides several tools to optimize models at different steps of model development: + + - **Post-training Optimization tool [(POT)](../../tools/pot/README.md)** is designed to optimize the inference of deep learning models by applying post-training methods that do not require model retraining or fine-tuning, like post-training quantization. + +- **Neural Network Compression Framework [(NNCF)](./nncf_introduction.md)** provides a suite of advanced algorithms for Neural Networks inference optimization with minimal accuracy drop, for example, quantization, pruning algorithms. + +- **Model Optimizer** implements optimization to a model, most of them added by default, but you can configure mean/scale values, batch size, RGB vs BGR input channels, and other parameters to speed-up preprocess of a model ([Embedding Preprocessing Computation](../MO_DG/prepare_model/Additional_Optimizations.md)) + + +## Detailed workflow: + +![](../img/DEVELOPMENT_FLOW_V3_crunch.svg) + +To understand which development optimization tool you need, refer to the diagram: + +POT is the easiest way to get optimized models, and usually takes several minutes depending on the model size and used HW. NNCF can be considered as an alternative or addition when the first one does not give accurate results. 
+ +![](../img/WHAT_TO_USE.svg) + +## See also +- [Deployment optimization](./dldt_deployment_optimization_guide.md) \ No newline at end of file diff --git a/docs/optimization_guide/nncf_introduction.md b/docs/optimization_guide/nncf_introduction.md new file mode 100644 index 00000000000..6ce2234771b --- /dev/null +++ b/docs/optimization_guide/nncf_introduction.md @@ -0,0 +1,63 @@ +# Neural Network Compression Framework {#docs_nncf_introduction} +This document describes the Neural Network Compression Framework (NNCF) which is being developed as a separate project outside of OpenVINO™ but it is highly aligned with OpenVINO™ in terms of the supported optimization features and models. It is open-sourced and available on [GitHub](https://github.com/openvinotoolkit/nncf). + +## Introduction + Neural Network Compression Framework (NNCF) is aimed at optimizing Deep Neural Network (DNN) by applying optimization methods, such as quantization, pruning, etc., to the original framework model. It mostly provides in-training optimization capabilities which means that optimization methods require model fine-tuning during and after optimization. The diagram below shows the model optimization workflow using NNCF. + ![](../img/nncf_workflow.png) + + ### Features + - Support optimization of PyTorch and TensorFlow 2.x models. 
+ - Support of various optimization algorithms, applied during a model fine-tuning process to achieve a better performance-accuracy trade-off: + + |Compression algorithm|PyTorch|TensorFlow 2.x| + | :--- | :---: | :---: | + |[8-bit quantization](https://github.com/openvinotoolkit/nncf/blob/develop/docs/compression_algorithms/Quantization.md) | Supported | Supported | + |[Filter pruning](https://github.com/openvinotoolkit/nncf/blob/develop/docs/compression_algorithms/Pruning.md) | Supported | Supported | + |[Sparsity](https://github.com/openvinotoolkit/nncf/blob/develop/docs/compression_algorithms/Sparsity.md) | Supported | Supported | + |[Mixed-precision quantization](https://github.com/openvinotoolkit/nncf/blob/develop/docs/compression_algorithms/Quantization.md#mixed_precision_quantization) | Supported | Not supported | + |[Binarization](https://github.com/openvinotoolkit/nncf/blob/develop/docs/compression_algorithms/Binarization.md) | Supported | Not supported | + + + +- Stacking of optimization methods. For example: 8-bit quantization + Filter Pruning. +- Support for [Accuracy-Aware model training](https://github.com/openvinotoolkit/nncf/blob/develop/docs/Usage.md#accuracy-aware-model-training) pipelines via the [Adaptive Compression Level Training](https://github.com/openvinotoolkit/nncf/tree/develop/docs/accuracy_aware_model_training/AdaptiveCompressionLevelTraining.md) and [Early Exit Training](https://github.com/openvinotoolkit/nncf/tree/develop/docs/accuracy_aware_model_training/EarlyExitTrainig.md). +- Automatic, configurable model graph transformation to obtain the compressed model. + > **NOTE**: Limited support for TensorFlow models. Only the models created, using Sequential or Keras Functional API, are supported. +- GPU-accelerated layers for the faster compressed model fine-tuning. +- Distributed training support. +- Configuration file examples for each supported compression algorithm. 
+- Exporting PyTorch compressed models to ONNX\* checkpoints and TensorFlow compressed models to SavedModel or Frozen Graph format, ready to use with [OpenVINO™ toolkit](https://github.com/openvinotoolkit/). +- Git patches for prominent third-party repositories ([huggingface-transformers](https://github.com/huggingface/transformers)) demonstrating the process of integrating NNCF into custom training pipelines + +## Get started +### Installation +NNCF provides the packages available for installation through the PyPI repository. To install the latest version via pip manager run the following command: +``` +pip install nncf +``` + +### Usage examples +NNCF provides various examples and tutorials that demonstrate usage of optimization methods. + +### Tutorials +- [Quantization-aware training of PyTorch model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/302-pytorch-quantization-aware-training) +- [Quantization-aware training of TensorFlow model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/305-tensorflow-quantization-aware-training) +- (Experimental) [Post-training quantization of PyTorch model](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/112-pytorch-post-training-quantization-nncf) + +### Samples +- PyTorch: + - [Image Classification sample](https://github.com/openvinotoolkit/nncf/blob/develop/examples/torch/classification/README.md) + - [Object Detection sample](https://github.com/openvinotoolkit/nncf/blob/develop/examples/torch/object_detection/README.md) + - [Semantic segmentation sample](https://github.com/openvinotoolkit/nncf/blob/develop/examples/torch/semantic_segmentation/README.md) + +- TensorFlow samples: + - [Image Classification sample](https://github.com/openvinotoolkit/nncf/blob/develop/examples/tensorflow/classification/README.md) + - [Object Detection sample](https://github.com/openvinotoolkit/nncf/blob/develop/examples/tensorflow/object_detection/README.md) + - 
[Instance Segmentation sample](https://github.com/openvinotoolkit/nncf/blob/develop/examples/tensorflow/segmentation/README.md) + + +## See also +- [Compressed Model Zoo](https://github.com/openvinotoolkit/nncf#nncf-compressed-model-zoo) +- [NNCF in HuggingFace Optimum](https://github.com/dkurt/optimum-openvino) +- [OpenVINO™ Post-training Optimization tool](../../tools/pot/README.md) + diff --git a/docs/ovsa/ovsa_get_started.md b/docs/ovsa/ovsa_get_started.md index 84cc5d47224..08ffe9f1869 100644 --- a/docs/ovsa/ovsa_get_started.md +++ b/docs/ovsa/ovsa_get_started.md @@ -11,7 +11,7 @@ In this release, one person performs the role of both the Model Developer and th ## Overview -The OpenVINO™ Security Add-on works with the [OpenVINO™ Model Server](@ref openvino_docs_ovms) on Intel® architecture. Together, the OpenVINO™ Security Add-on and the OpenVINO™ Model Server provide a way for Model Developers and Independent Software Vendors to use secure packaging and secure model execution to enable access control to the OpenVINO™ models, and for model Users to run inference within assigned limits. +The OpenVINO™ Security Add-on works with the [OpenVINO™ Model Server](@ref ovms_what_is_openvino_model_server) on Intel® architecture. Together, the OpenVINO™ Security Add-on and the OpenVINO™ Model Server provide a way for Model Developers and Independent Software Vendors to use secure packaging and secure model execution to enable access control to the OpenVINO™ models, and for model Users to run inference within assigned limits. The OpenVINO™ Security Add-on consists of three components that run in Kernel-based Virtual Machines (KVMs). These components provide a way to run security-sensitive operations in an isolated environment. A brief description of the three components are as follows. Click each triangled line for more information about each. 
diff --git a/docs/resources.md b/docs/resources/resources.md similarity index 62% rename from docs/resources.md rename to docs/resources/resources.md index 9ce3654c995..bda4a9d2e63 100644 --- a/docs/resources.md +++ b/docs/resources/resources.md @@ -7,9 +7,10 @@ :maxdepth: 1 :hidden: :caption: Additional Resources - + Release Notes openvino_docs_IE_DG_supported_plugins_Supported_Devices + openvino_docs_MO_DG_prepare_model_Supported_Frameworks_Layers openvino_docs_OV_Glossary openvino_docs_Legal_Information @@ -18,10 +19,12 @@ :maxdepth: 1 :caption: Intermediate Representation and Operations Sets :hidden: - + + openvino_docs_MO_DG_IR_and_opsets openvino_docs_ops_opset openvino_docs_ops_broadcast_rules openvino_docs_operations_specifications + openvino_docs_MO_DG_prepare_model_convert_model_IR_suitable_for_INT8_inference .. toctree:: @@ -39,14 +42,14 @@ This section includes a variety of reference information in three broad categori ### Additional Resources [Release Notes](https://software.intel.com/content/www/us/en/develop/articles/openvino-relnotes.html) contains change logs and notes for each OpenVINO release. -[Supported Devices](OV_Runtime_UG/supported_plugins/Supported_Devices.md) is compatibility information about supported hardware accelerators. +[Supported Devices](../OV_Runtime_UG/supported_plugins/Supported_Devices.md) is compatibility information about supported hardware accelerators. -[Legal Information](Legal_Information.md) has trademark information and other legal statements. +[Legal Information](../Legal_Information.md) has trademark information and other legal statements. ### Intermediate Representation and Operations Sets -[Available Operation Sets](ops/opset.md) is a list of supported operations and explanation of supported capabilities. +[Available Operation Sets](../ops/opset.md) is a list of supported operations and explanation of supported capabilities. 
-[Broadcast Rules for Elementwise Operations](ops/broadcast_rules.md) explains the rules used for to support an arbitrary number of dimensions in neural nets. +[Broadcast Rules for Elementwise Operations](../ops/broadcast_rules.md) explains the rules used for to support an arbitrary number of dimensions in neural nets. ### Case Studies Links to [articles](https://www.intel.com/openvino-success-stories) about real-world examples of OpenVINO™ usage. diff --git a/docs/scripts/create_mapping.py b/docs/scripts/create_mapping.py index 7fd0bbcb249..028154ee918 100644 --- a/docs/scripts/create_mapping.py +++ b/docs/scripts/create_mapping.py @@ -11,6 +11,8 @@ REPOSITORIES = [ 'openvino', 'omz', 'pot' + 'ovms', + 'ote' ] diff --git a/docs/scripts/doxy_md_filter.py b/docs/scripts/doxy_md_filter.py index 59219ed6b2e..12395b8833c 100644 --- a/docs/scripts/doxy_md_filter.py +++ b/docs/scripts/doxy_md_filter.py @@ -65,12 +65,19 @@ class DoxyMDFilter: """ for link in self.md_links: link_path = self.parent_folder.joinpath(link).resolve() - if os.path.exists(link_path): + if os.path.exists(link_path) and link_path in self.file_to_label_mapping: self.content = self.content.replace(link, '@ref ' + self.file_to_label_mapping[link_path]) else: rel_path = os.path.relpath(link_path, self.input_dir).replace('\\', '/') self.content = self.content.replace(link, rel_path) + def remove_comment_block_sphinxdirective(self): + """ + Remove comment blocks from `sphinxdirective` + """ + self.content = re.sub(r'\<\!\-\-\s*?\@sphinxdirective', '@sphinxdirective', self.content) + self.content = re.sub(r'\@endsphinxdirective\s*?\-\-\>', '@endsphinxdirective', self.content) + def copy_images(self): """ Go through image links and copy them into output_folder @@ -97,6 +104,7 @@ class DoxyMDFilter: Do all processing operations on a markdown file """ self.replace_image_links() + self.remove_comment_block_sphinxdirective() self.replace_md_links() self.copy_markdown() self.copy_images() diff --git 
a/docs/scripts/tests/conftest.py b/docs/scripts/tests/conftest.py index 465ba94ccf6..dd4999af356 100644 --- a/docs/scripts/tests/conftest.py +++ b/docs/scripts/tests/conftest.py @@ -72,6 +72,11 @@ def pytest_addoption(parser): action="store_true", default=False, help='Include link check for ovms') + parser.addoption( + '--include_ote', + action="store_true", + default=False, + help='Include link check for ote') def read_lists(configs): @@ -90,7 +95,7 @@ def read_lists(configs): def pytest_generate_tests(metafunc): """ Generate tests depending on command line options """ - exclude_links = {'open_model_zoo', 'workbench', 'pot', 'gst', 'omz', 'ovms'} + exclude_links = {'open_model_zoo', 'workbench', 'pot', 'gst', 'omz', 'ovms', 'ote'} if metafunc.config.getoption('include_omz'): exclude_links.remove('open_model_zoo') exclude_links.remove('omz') @@ -102,6 +107,8 @@ def pytest_generate_tests(metafunc): exclude_links.remove('gst') if metafunc.config.getoption('include_ovms'): exclude_links.remove('ovms') + if metafunc.config.getoption('include_ote'): + exclude_links.remove('ote') # warnings to ignore suppress_warnings = read_lists(metafunc.config.getoption('suppress_warnings')) diff --git a/docs/security_guide/workbench.md b/docs/security_guide/workbench.md index cfcbdc56b99..3760277a958 100644 --- a/docs/security_guide/workbench.md +++ b/docs/security_guide/workbench.md @@ -5,18 +5,14 @@ Deep Learning Workbench (DL Workbench) is a web application running within a Doc ## Run DL Workbench Unless necessary, limit the connections to the DL Workbench to `localhost` (127.0.0.1), so that it -is only accessible from the machine the Docker container is built on: +is only accessible from the machine the Docker container is built on. -* The script [starting the DL Workbench from the - package](@ref workbench_docs_Workbench_DG_Install_from_Package) ensures that the container and the web - application are accessible only from the `localhost` by default. 
- -* When using `docker run` to [start the DL Workbench from Docker - Hub](@ref workbench_docs_Workbench_DG_Run_Locally), limit connections for the host IP 127.0.0.1. - For example, limit the connections for the host IP to the port `5665` with the `-p - 127.0.0.1:5665:5665` command . Refer to [Container - networking](https://docs.docker.com/config/containers/container-networking/#published-ports) for - details. +When using `docker run` to [start the DL Workbench from Docker +Hub](@ref workbench_docs_Workbench_DG_Run_Locally), limit connections for the host IP 127.0.0.1. +For example, limit the connections for the host IP to the port `5665` with the `-p +127.0.0.1:5665:5665` command . Refer to [Container +networking](https://docs.docker.com/config/containers/container-networking/#published-ports) for +details. ## Authentication Security diff --git a/docs/snippets/Bfloat16Inference0.cpp b/docs/snippets/Bfloat16Inference0.cpp deleted file mode 100644 index a5b91778e6b..00000000000 --- a/docs/snippets/Bfloat16Inference0.cpp +++ /dev/null @@ -1,10 +0,0 @@ -#include - -int main() { -using namespace InferenceEngine; -//! [part0] -InferenceEngine::Core core; -auto cpuOptimizationCapabilities = core.GetMetric("CPU", METRIC_KEY(OPTIMIZATION_CAPABILITIES)).as>(); -//! [part0] -return 0; -} diff --git a/docs/snippets/Bfloat16Inference1.cpp b/docs/snippets/Bfloat16Inference1.cpp deleted file mode 100644 index c3dbd23e1cd..00000000000 --- a/docs/snippets/Bfloat16Inference1.cpp +++ /dev/null @@ -1,13 +0,0 @@ -#include - -int main() { -using namespace InferenceEngine; -//! [part1] -InferenceEngine::Core core; -auto network = core.ReadNetwork("sample.xml"); -auto exeNetwork = core.LoadNetwork(network, "CPU"); -auto enforceBF16 = exeNetwork.GetConfig(PluginConfigParams::KEY_ENFORCE_BF16).as(); -//! 
[part1] - -return 0; -} diff --git a/docs/snippets/Bfloat16Inference2.cpp b/docs/snippets/Bfloat16Inference2.cpp deleted file mode 100644 index 2d231830677..00000000000 --- a/docs/snippets/Bfloat16Inference2.cpp +++ /dev/null @@ -1,11 +0,0 @@ -#include - -int main() { -using namespace InferenceEngine; -//! [part2] -InferenceEngine::Core core; -core.SetConfig({ { CONFIG_KEY(ENFORCE_BF16), CONFIG_VALUE(NO) } }, "CPU"); -//! [part2] - -return 0; -} diff --git a/docs/snippets/CMakeLists.txt b/docs/snippets/CMakeLists.txt index 2e4ef05c1a7..b9908896c8b 100644 --- a/docs/snippets/CMakeLists.txt +++ b/docs/snippets/CMakeLists.txt @@ -4,15 +4,55 @@ set(TARGET_NAME ie_docs_snippets) -file(GLOB SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp") +file(GLOB SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/gpu/*.cpp") -# remove OpenCL related sources -# TODO: fix compilation of OpenCL files -if(NOT CLDNN__IOCL_ICD_INCDIRS OR TRUE) - list(REMOVE_ITEM SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/GPU_RemoteBlob_API0.cpp" - "${CMAKE_CURRENT_SOURCE_DIR}/GPU_RemoteBlob_API1.cpp" - "${CMAKE_CURRENT_SOURCE_DIR}/GPU_RemoteBlob_API2.cpp" - "${CMAKE_CURRENT_SOURCE_DIR}/GPU_RemoteBlob_API3.cpp") +find_package(OpenCL) +find_path(OpenCL_HPP_INCLUDE_DIR + NAMES + CL/cl2.hpp OpenCL/cl2.hpp + HINTS + ${opencl_root_hints} + ENV "PROGRAMFILES(X86)" + ENV AMDAPPSDKROOT + ENV INTELOCLSDKROOT + ENV NVSDKCOMPUTE_ROOT + ENV CUDA_PATH + ENV ATISTREAMSDKROOT + ENV OCL_ROOT + PATH_SUFFIXES + include + OpenCL/common/inc + "AMD APP/include") + +if(TARGET OpenCL) + # Use OpenCL CPP headers from sources if present + set(OpenCL_HEADERS ${OPENCL_HEADERS_DIR}) + set(OpenCL_LIB "OpenCL") +elseif(OpenCL_HPP_INCLUDE_DIR) + # Append OpenCL CPP headers to C headers and use both + set(OpenCL_HEADERS ${OpenCL_INCLUDE_DIR} ${OpenCL_HPP_INCLUDE_DIR}) + set(OpenCL_LIB "OpenCL::OpenCL") +endif() + +# remove GPU remote snippets if OCL hasn't been found +if (NOT (OpenCL_FOUND AND OpenCL_HEADERS)) + 
list(REMOVE_ITEM SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/gpu/context_sharing_va.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/gpu/context_sharing.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/gpu/preprocessing.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/gpu/queue_sharing.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/gpu/remote_objects_creation.cpp") +endif() + +# try to find VA libraries +find_package(PkgConfig QUIET) +if(PkgConfig_FOUND) + pkg_search_module(LIBVA QUIET libva) +endif() + +# TODO: pkg_search_module finds libva not in sysroot +if(ANDROID) + set(LIBVA_FOUND OFF CACHE BOOL "" FORCE) endif() # remove OpenCV related sources @@ -35,9 +75,22 @@ list(REMOVE_ITEM SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/dldt_optimization_guide2.c # create a static library add_library(${TARGET_NAME} STATIC ${SOURCES}) +target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/../template_extension/new/") -if(CLDNN__IOCL_ICD_INCDIRS) - target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${CLDNN__IOCL_ICD_INCDIRS}) +if(OpenCL_FOUND AND OpenCL_HEADERS) + target_link_libraries(${TARGET_NAME} PRIVATE ${OpenCL_LIB}) + target_include_directories(${TARGET_NAME} PRIVATE ${OpenCL_HEADERS}) + + if(LIBVA_FOUND) + target_compile_definitions(${TARGET_NAME} PRIVATE ENABLE_LIBVA) + target_include_directories(${TARGET_NAME} PRIVATE ${LIBVA_INCLUDE_DIRS}) + target_link_libraries(${TARGET_NAME} PRIVATE ${LIBVA_LINK_LIBRARIES}) + endif() + + if(WIN32) + target_compile_definitions(${TARGET_NAME} PRIVATE ENABLE_DX11) + target_link_libraries(${TARGET_NAME} PRIVATE d3d11 dxgi) + endif() endif() if(OpenCV_FOUND) @@ -57,3 +110,22 @@ if(NOT MSVC) endif() target_link_libraries(${TARGET_NAME} PRIVATE openvino::runtime openvino::runtime::dev) + +set(TARGET_NAME "ov_integration_snippet") +# [cmake:integration_example] +cmake_minimum_required(VERSION 3.10) +set(CMAKE_CXX_STANDARD 11) + + +find_package(OpenVINO REQUIRED) + +add_executable(${TARGET_NAME} src/main.cpp) + +target_link_libraries(${TARGET_NAME} PRIVATE 
openvino::runtime) +# [cmake:integration_example] +if(NOT MSVC) + target_compile_options(${TARGET_NAME} PRIVATE -Wno-unused-variable) + if(CMAKE_COMPILER_IS_GNUCXX) + target_compile_options(${TARGET_NAME} PRIVATE -Wno-unused-but-set-variable) + endif() +endif() diff --git a/docs/snippets/DynamicBatching.cpp b/docs/snippets/DynamicBatching.cpp deleted file mode 100644 index 4d5f24fed75..00000000000 --- a/docs/snippets/DynamicBatching.cpp +++ /dev/null @@ -1,41 +0,0 @@ -#include - -int main() { -int FLAGS_bl = 1; -auto imagesData = std::vector(2); -auto imagesData2 = std::vector(4); -//! [part0] -int dynBatchLimit = FLAGS_bl; //take dynamic batch limit from command line option - -// Read network model -InferenceEngine::Core core; -InferenceEngine::CNNNetwork network = core.ReadNetwork("sample.xml"); - - -// enable dynamic batching and prepare for setting max batch limit -const std::map dyn_config = -{ { InferenceEngine::PluginConfigParams::KEY_DYN_BATCH_ENABLED, InferenceEngine::PluginConfigParams::YES } }; -network.setBatchSize(dynBatchLimit); - -// create executable network and infer request -auto executable_network = core.LoadNetwork(network, "CPU", dyn_config); -auto infer_request = executable_network.CreateInferRequest(); - -// ... - -// process a set of images -// dynamically set batch size for subsequent Infer() calls of this request -size_t batchSize = imagesData.size(); -infer_request.SetBatch(batchSize); -infer_request.Infer(); - -// ... - -// process another set of images -batchSize = imagesData2.size(); -infer_request.SetBatch(batchSize); -infer_request.Infer(); -//! [part0] - -return 0; -} diff --git a/docs/snippets/GPU_Kernel.cpp b/docs/snippets/GPU_Kernel.cpp deleted file mode 100644 index 8b21a79dfe2..00000000000 --- a/docs/snippets/GPU_Kernel.cpp +++ /dev/null @@ -1,12 +0,0 @@ -#include - -int main() { -using namespace InferenceEngine; -//! 
[part0] -InferenceEngine::Core core; -// Load GPU Extensions -core.SetConfig({ { InferenceEngine::PluginConfigParams::KEY_CONFIG_FILE, "" } }, "GPU"); -//! [part0] - -return 0; -} diff --git a/docs/snippets/GPU_Metric0.cpp b/docs/snippets/GPU_Metric0.cpp deleted file mode 100644 index b143ff64631..00000000000 --- a/docs/snippets/GPU_Metric0.cpp +++ /dev/null @@ -1,12 +0,0 @@ -#include -#include - -int main() { -//! [part0] -ov::Core core; -auto model = core.read_model("sample.xml"); -auto compiledModel = core.compile_model(model, "GPU"); -std::map statistics_map = core.get_property("GPU", ov::intel_gpu::memory_statistics); -//! [part0] -return 0; -} diff --git a/docs/snippets/GPU_Metric1.cpp b/docs/snippets/GPU_Metric1.cpp deleted file mode 100644 index 64e959233e5..00000000000 --- a/docs/snippets/GPU_Metric1.cpp +++ /dev/null @@ -1,25 +0,0 @@ -#include -#include - -int main() { -//! [part1] -ov::Core core; -std::shared_ptr model = core.read_model("network.xml"); -uint32_t n_streams = 2; -int64_t available_device_mem_size = 3221225472; -ov::AnyMap options = { - ov::hint::model(model), // Required. Set the address of the target network. If this is not set, the MAX_BATCH_SIZE returns 1. - ov::num_streams(n_streams), // Optional. Set only when you want to estimate max batch size for a specific throughtput streams. Default is 1 or throughtput streams set by SetConfig. - ov::intel_gpu::hint::available_device_mem(available_device_mem_size) // Optional. Set only when you want to limit the available device mem size. -}; - -uint32_t max_batch_size = core.get_property("GPU", ov::max_batch_size, options); -//! [part1] -//! [part2] -// This is not entirely GPU-specific metric (so METRIC_KEY is used rather than GPU_METRIC_KEY below), -// but the GPU is the only device that supports that at the moment. -// For the GPU, the metric already accommodates limitation for the on-device memory that the MAX_BATCH_SIZE poses. -// so OPTIMAL_BATCH_SIZE is always less than MAX_BATCH_SIZE. 
Unlike the latter it is also aligned to the power of 2. -uint32_t optimal_batch_size = core.get_property("GPU", ov::optimal_batch_size, options); -//! [part2] -} diff --git a/docs/snippets/GPU_RemoteBlob_API0.cpp b/docs/snippets/GPU_RemoteBlob_API0.cpp deleted file mode 100644 index e376ef9e407..00000000000 --- a/docs/snippets/GPU_RemoteBlob_API0.cpp +++ /dev/null @@ -1,60 +0,0 @@ -#define CL_HPP_MINIMUM_OPENCL_VERSION 120 -#define CL_HPP_TARGET_OPENCL_VERSION 120 - -#include -#include -#include - - -int main() { -using namespace InferenceEngine; -//! [part0] - - -// ... - - -// initialize the core and load the network -InferenceEngine::Core ie; -auto net = ie.ReadNetwork("network.xml"); -auto exec_net = ie.LoadNetwork(net, "GPU"); - - -// obtain the RemoteContext pointer from the executable network object -auto cldnn_context = exec_net.GetContext(); -// obtain the OpenCL context handle from the RemoteContext, -// get device info and create a queue -cl::Context ctx = std::dynamic_pointer_cast(cldnn_context); -_device = cl::Device(_context.getInfo()[0].get(), true); -cl::CommandQueue _queue; -cl_command_queue_properties props = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE; -_queue = cl::CommandQueue(_context, _device, props); - - -// create the OpenCL buffer within the obtained context -cl::Buffer shared_buffer(ctx, CL_MEM_READ_WRITE, image_size * num_channels, NULL, &err); -// wrap the buffer into RemoteBlob -auto shared_blob = gpu::make_shared_blob(input_info->getTensorDesc(), cldnn_context, shared_buffer); - - -// ... -// execute user kernel -cl::Kernel kernel(program, kernelName.c_str()); -kernel.setArg(0, shared_buffer); -queue.enqueueNDRangeKernel(kernel, - cl::NDRange(0), - cl::NDRange(image_size), - cl::NDRange(1), - 0, // wait events * - &profileEvent); -queue.finish(); -// ... - - -// pass results to the inference -inf_req_shared.SetBlob(input_name, shared_blob); -inf_req_shared.Infer(); -//! 
[part0] - -return 0; -} diff --git a/docs/snippets/GPU_RemoteBlob_API1.cpp b/docs/snippets/GPU_RemoteBlob_API1.cpp deleted file mode 100644 index bcead81e791..00000000000 --- a/docs/snippets/GPU_RemoteBlob_API1.cpp +++ /dev/null @@ -1,32 +0,0 @@ -#define CL_HPP_MINIMUM_OPENCL_VERSION 120 -#define CL_HPP_TARGET_OPENCL_VERSION 120 - -#include -#include -#include - - -int main() { -using namespace InferenceEngine; -//! [part1] - -// ... - -cl::Context ctx = get_my_OpenCL_context(); - -// share the context with GPU plugin and compile ExecutableNetwork -auto remote_context = gpu::make_shared_context(ie, "GPU", ocl_instance->_context.get()); -auto exec_net_shared = ie.LoadNetwork(net, remote_context); -auto inf_req_shared = exec_net_shared.CreateInferRequest(); - - -// ... -// do OpenCL processing stuff -// ... - -// run the inference -inf_req_shared.Infer(); -//! [part1] - -return 0; -} diff --git a/docs/snippets/GPU_RemoteBlob_API2.cpp b/docs/snippets/GPU_RemoteBlob_API2.cpp deleted file mode 100644 index 13597ae4561..00000000000 --- a/docs/snippets/GPU_RemoteBlob_API2.cpp +++ /dev/null @@ -1,54 +0,0 @@ -#include -#include -#include - - -int main() { -using namespace InferenceEngine; -//! [part2] - -// ... - - -// initialize the objects -CNNNetwork network = ie.ReadNetwork(xmlFileName, binFileName); - - -// ... - - -auto inputInfoItem = *inputInfo.begin(); -inputInfoItem.second->setPrecision(Precision::U8); -inputInfoItem.second->setLayout(Layout::NCHW); -inputInfoItem.second->getPreProcess().setColorFormat(ColorFormat::NV12); - -VADisplay disp = get_VA_Device(); -// create the shared context object -auto shared_va_context = gpu::make_shared_context(ie, "GPU", disp); -// compile network within a shared context -ExecutableNetwork executable_network = ie.LoadNetwork(network, - shared_va_context, - { { GPUConfigParams::KEY_GPU_NV12_TWO_INPUTS, - PluginConfigParams::YES } }); - - -// decode/inference loop -for (int i = 0; i < nframes; i++) { -// ... 
- // execute decoding and obtain decoded surface handle - decoder.DecodeFrame(); - VASurfaceID va_surface = decoder.get_VA_output_surface(); -// ... - //wrap decoder output into RemoteBlobs and set it as inference input - auto nv12_blob = gpu::make_shared_blob_nv12(ieInHeight, - ieInWidth, - shared_va_context, - va_surface - ); - inferRequests[currentFrame].SetBlob(input_name, nv12_blob); - inferRequests[currentFrame].StartAsync(); - inferRequests[prevFrame].Wait(InferenceEngine::InferRequest::WaitMode::RESULT_READY); -} -//! [part2] -return 0; -} diff --git a/docs/snippets/GPU_RemoteBlob_API3.cpp b/docs/snippets/GPU_RemoteBlob_API3.cpp deleted file mode 100644 index 6e374425285..00000000000 --- a/docs/snippets/GPU_RemoteBlob_API3.cpp +++ /dev/null @@ -1,76 +0,0 @@ -#define CL_HPP_MINIMUM_OPENCL_VERSION 120 -#define CL_HPP_TARGET_OPENCL_VERSION 120 - -#include -#include -#include - - -int main() { -using namespace InferenceEngine; -//! [part0] - - -// ... - - -// initialize the core and read the network -InferenceEngine::Core ie; -auto net = ie.ReadNetwork("network.xml"); - -// initialize opencl context and create queue -cl::Context ctx = get_my_OpenCL_context(); -cl::CommandQueue queue = get_my_OpenCL_queue(); - -// share the queue with GPU plugin and compile ExecutableNetwork -auto remote_context = gpu::make_shared_context(ie, "GPU", queue.get()); -auto exec_net_shared = ie.LoadNetwork(net, remote_context); - -// create the OpenCL buffers within the context -cl::Buffer shared_in_buffer(ctx, CL_MEM_READ_WRITE, image_size * num_channels, NULL, &err); -cl::Buffer shared_out_buffer(ctx, CL_MEM_READ_WRITE, image_size * num_channels, NULL, &err); -// wrap in and out buffers into RemoteBlob and set them to infer request -auto shared_in_blob = gpu::make_shared_blob(input_info->getTensorDesc(), remote_context, shared_in_buffer); -auto shared_out_blob = gpu::make_shared_blob(out_data->getTensorDesc(), remote_context, shared_out_buffer); -auto infer_request = 
exec_net_shared.CreateInferRequest(); -infer_request.SetBlob(input_name, shared_in_blob); -infer_request.SetBlob(output_name, shared_out_blob); - -// ... -// execute user kernel -cl::Kernel kernel_preproc(program, kernel_name_preproc.c_str()); -kernel_preproc.setArg(0, shared_in_buffer); -queue.enqueueNDRangeKernel(kernel_preproc, - cl::NDRange(0), - cl::NDRange(image_size), - cl::NDRange(1), - nullptr, // wait events * - &profileEvent); -// Blocking clFinish() call is not required, but this barrier is added to the queue to guarantee that user kernel is finished -// before any inference primitive is started -queue.enqueueBarrierWithWaitList(nullptr, nullptr); -// ... - -// pass results to the inference -// since the remote context is created with queue sharing, StartAsync() guarantees that scheduling is finished -infer_request.StartAsync(); - -// execute some postprocessing kernel. -// infer_request.Wait() is not called, synchonization between inference and post-processing is done via -// enqueueBarrierWithWaitList call. -cl::Kernel kernel_postproc(program, kernel_name_postproc.c_str()); -kernel_postproc.setArg(0, shared_out_buffer); -queue.enqueueBarrierWithWaitList(nullptr, nullptr); -queue.enqueueNDRangeKernel(kernel_postproc, - cl::NDRange(0), - cl::NDRange(image_size), - cl::NDRange(1), - nullptr, // wait events * - &profileEvent); - -// Wait for pipeline completion -queue.finish(); -//! [part0] - -return 0; -} diff --git a/docs/snippets/InferenceEngine_Caching0.cpp b/docs/snippets/InferenceEngine_Caching0.cpp deleted file mode 100644 index 5311a3d0bb6..00000000000 --- a/docs/snippets/InferenceEngine_Caching0.cpp +++ /dev/null @@ -1,17 +0,0 @@ -#include - -int main() { -using namespace InferenceEngine; - std::string modelPath = "/tmp/myModel.xml"; - std::string device = "GNA"; - std::map deviceConfig; -//! 
[part0] - InferenceEngine::Core ie; // Step 1: create Inference engine object - ie.SetConfig({{CONFIG_KEY(CACHE_DIR), "myCacheFolder"}}); // Step 1b: Enable caching - auto cnnNet = ie.ReadNetwork(modelPath); // Step 2: ReadNetwork - //... // Step 3: Prepare inputs/outputs - //... // Step 4: Set device configuration - ie.LoadNetwork(cnnNet, device, deviceConfig); // Step 5: LoadNetwork -//! [part0] -return 0; -} diff --git a/docs/snippets/InferenceEngine_Caching1.cpp b/docs/snippets/InferenceEngine_Caching1.cpp deleted file mode 100644 index 3c9d0c5b22d..00000000000 --- a/docs/snippets/InferenceEngine_Caching1.cpp +++ /dev/null @@ -1,13 +0,0 @@ -#include - -int main() { -using namespace InferenceEngine; - std::string modelPath = "/tmp/myModel.xml"; - std::string device = "GNA"; - std::map deviceConfig; -//! [part1] - InferenceEngine::Core ie; // Step 1: create Inference engine object - ie.LoadNetwork(modelPath, device, deviceConfig); // Step 2: LoadNetwork by model file path -//! [part1] -return 0; -} diff --git a/docs/snippets/InferenceEngine_Caching2.cpp b/docs/snippets/InferenceEngine_Caching2.cpp deleted file mode 100644 index aaf4b33c10d..00000000000 --- a/docs/snippets/InferenceEngine_Caching2.cpp +++ /dev/null @@ -1,14 +0,0 @@ -#include - -int main() { -using namespace InferenceEngine; - std::string modelPath = "/tmp/myModel.xml"; - std::string device = "GNA"; - std::map deviceConfig; -//! [part2] - InferenceEngine::Core ie; // Step 1: create Inference engine object - ie.SetConfig({{CONFIG_KEY(CACHE_DIR), "myCacheFolder"}}); // Step 1b: Enable caching - ie.LoadNetwork(modelPath, device, deviceConfig); // Step 2: LoadNetwork by model file path -//! 
[part2] -return 0; -} diff --git a/docs/snippets/InferenceEngine_Caching3.cpp b/docs/snippets/InferenceEngine_Caching3.cpp deleted file mode 100644 index db6cd89e5c6..00000000000 --- a/docs/snippets/InferenceEngine_Caching3.cpp +++ /dev/null @@ -1,20 +0,0 @@ -#include - -int main() { -using namespace InferenceEngine; - std::string modelPath = "/tmp/myModel.xml"; - std::string deviceName = "GNA"; - std::map deviceConfig; - InferenceEngine::Core ie; -//! [part3] - // Get list of supported metrics - std::vector keys = ie.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)); - - // Find 'IMPORT_EXPORT_SUPPORT' metric in supported metrics - auto it = std::find(keys.begin(), keys.end(), METRIC_KEY(IMPORT_EXPORT_SUPPORT)); - - // If metric 'IMPORT_EXPORT_SUPPORT' exists, check it's value - auto cachingSupported = (it != keys.end()) && ie.GetMetric(deviceName, METRIC_KEY(IMPORT_EXPORT_SUPPORT)).as(); -//! [part3] - return 0; -} diff --git a/docs/snippets/Integrate_with_customer_application_new_API.cpp b/docs/snippets/Integrate_with_customer_application_new_API.cpp deleted file mode 100644 index 3bee3d4df4c..00000000000 --- a/docs/snippets/Integrate_with_customer_application_new_API.cpp +++ /dev/null @@ -1,133 +0,0 @@ -#include - -int main() { -const std::string output_name = "output_name"; -const std::string input_name = "input_name"; -//! [part0] -InferenceEngine::Core core; -InferenceEngine::CNNNetwork network; -InferenceEngine::ExecutableNetwork executable_network; -//! [part0] - -//! [part1] -network = core.ReadNetwork("Model.xml"); -//! [part1] - -//! [part2] -network = core.ReadNetwork("model.onnx"); -//! [part2] - -//! [part3] -/** Take information about all topology inputs **/ -InferenceEngine::InputsDataMap input_info = network.getInputsInfo(); -/** Take information about all topology outputs **/ -InferenceEngine::OutputsDataMap output_info = network.getOutputsInfo(); -//! [part3] - -//! 
[part4] -/** Iterate over all input info**/ -for (auto &item : input_info) { - auto input_data = item.second; - input_data->setPrecision(InferenceEngine::Precision::U8); - input_data->setLayout(InferenceEngine::Layout::NCHW); - input_data->getPreProcess().setResizeAlgorithm(InferenceEngine::RESIZE_BILINEAR); - input_data->getPreProcess().setColorFormat(InferenceEngine::ColorFormat::RGB); -} -/** Iterate over all output info**/ -for (auto &item : output_info) { - auto output_data = item.second; - output_data->setPrecision(InferenceEngine::Precision::FP32); - output_data->setLayout(InferenceEngine::Layout::NC); -} -//! [part4] - -//! [part5] -executable_network = core.LoadNetwork(network, "CPU"); -//! [part5] - -//! [part6] -/** Optional config. E.g. this enables profiling of performance counters. **/ -std::map config = {{ InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES }}; -executable_network = core.LoadNetwork(network, "CPU", config); -//! [part6] - -//! [part7] -auto infer_request = executable_network.CreateInferRequest(); -//! [part7] - -auto infer_request1 = executable_network.CreateInferRequest(); -auto infer_request2 = executable_network.CreateInferRequest(); - -//! [part8] -/** Iterate over all input blobs **/ -for (auto & item : input_info) { - auto input_name = item.first; - /** Get input blob **/ - auto input = infer_request.GetBlob(input_name); - /** Fill input tensor with planes. First b channel, then g and r channels **/ -// ... -} -//! [part8] - -//! [part9] -auto output = infer_request1.GetBlob(output_name); -infer_request2.SetBlob(input_name, output); -//! [part9] - -//! [part10] -/** inputBlob points to input of a previous network and - cropROI contains coordinates of output bounding box **/ -InferenceEngine::Blob::Ptr inputBlob; -InferenceEngine::ROI cropRoi; -//... 
- -/** roiBlob uses shared memory of inputBlob and describes cropROI - according to its coordinates **/ -auto roiBlob = InferenceEngine::make_shared_blob(inputBlob, cropRoi); -infer_request2.SetBlob(input_name, roiBlob); -//! [part10] - -//! [part11] -/** Iterate over all input blobs **/ -for (auto & item : input_info) { - auto input_data = item.second; - /** Create input blob **/ - InferenceEngine::TBlob::Ptr input; - // assuming input precision was asked to be U8 in prev step - input = InferenceEngine::make_shared_blob( - InferenceEngine::TensorDesc(InferenceEngine::Precision::U8, input_data->getTensorDesc().getDims(), - input_data->getTensorDesc().getLayout())); - input->allocate(); - infer_request.SetBlob(item.first, input); - - /** Fill input tensor with planes. First b channel, then g and r channels **/ -// ... -} -//! [part11] - -//! [part12] -infer_request.StartAsync(); -infer_request.Wait(InferenceEngine::InferRequest::WaitMode::RESULT_READY); -//! [part12] - -auto sync_infer_request = executable_network.CreateInferRequest(); - -//! [part13] -sync_infer_request.Infer(); -//! [part13] - -//! [part14] - for (auto &item : output_info) { - auto output_name = item.first; - auto output = infer_request.GetBlob(output_name); - { - auto const memLocker = output->cbuffer(); // use const memory locker - // output_buffer is valid as long as the lifetime of memLocker - const float *output_buffer = memLocker.as(); - /** output_buffer[] - accessing output blob data **/ - } - } -//! [part14] - -return 0; -} diff --git a/docs/snippets/MULTI0.cpp b/docs/snippets/MULTI0.cpp index 05146a6c945..12f5e26c0c0 100644 --- a/docs/snippets/MULTI0.cpp +++ b/docs/snippets/MULTI0.cpp @@ -1,18 +1,18 @@ -#include +#include int main() { -using namespace InferenceEngine; //! 
[part0] - Core ie; - auto network = ie.ReadNetwork("sample.xml"); - //NEW IE-CENTRIC API, the "MULTI" plugin is (globally) pre-configured with the explicit option: - ie.SetConfig({{"MULTI_DEVICE_PRIORITIES", "HDDL,GPU"}}, "MULTI"); - ExecutableNetwork exec0 = ie.LoadNetwork(network, "MULTI", {}); +ov::Core core; +std::shared_ptr model = core.read_model("sample.xml"); +// the "MULTI" plugin is (globally) pre-configured with the explicit option: +core.set_property("MULTI", ov::device::priorities("HDDL,GPU")); +ov::CompiledModel compileModel0 = core.compile_model(model, "MULTI"); - //NEW IE-CENTRIC API, configuration of the "MULTI" is part of the network configuration (and hence specific to the network): - ExecutableNetwork exec1 = ie.LoadNetwork(network, "MULTI", {{"MULTI_DEVICE_PRIORITIES", "HDDL,GPU"}}); - //NEW IE-CENTRIC API, same as previous, but configuration of the "MULTI" is part of the name (so config is empty), also network-specific: - ExecutableNetwork exec2 = ie.LoadNetwork(network, "MULTI:HDDL,GPU", {}); +// configuration of the "MULTI" is part of the network configuration (and hence specific to the network): +ov::CompiledModel compileModel1 = core.compile_model(model, "MULTI", ov::device::priorities("HDDL,GPU")); + +// same as previous, but configuration of the "MULTI" is part of the name (so config is empty), also network-specific: +ov::CompiledModel compileModel2 = core.compile_model(model, "MULTI:HDDL,GPU"); //! [part0] return 0; } diff --git a/docs/snippets/MULTI1.cpp b/docs/snippets/MULTI1.cpp index 60ea4ac5282..904662de2a0 100644 --- a/docs/snippets/MULTI1.cpp +++ b/docs/snippets/MULTI1.cpp @@ -1,22 +1,21 @@ -#include +#include int main() { -using namespace InferenceEngine; //! [part1] - Core ie; - auto network = ie.ReadNetwork("sample.xml"); - ExecutableNetwork exec = ie.LoadNetwork(network, "MULTI:HDDL,GPU", {}); - //... 
- exec.SetConfig({{"MULTI_DEVICE_PRIORITIES", "GPU,HDDL"}}); - // you can even exclude some device - exec.SetConfig({{"MULTI_DEVICE_PRIORITIES", "GPU"}}); - //... - // and then return it back - exec.SetConfig({{"MULTI_DEVICE_PRIORITIES", "GPU,HDDL"}}); - //but you cannot add new devices on the fly, the next line will trigger the following exception: - //[ ERROR ] [NOT_FOUND] You can only change device priorities but not add new devices with the Network's SetConfig(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES. - //CPU device was not in the original device list! - exec.SetConfig({{"MULTI_DEVICE_PRIORITIES", "CPU,GPU,HDDL"}}); +ov::Core core; +std::shared_ptr model = core.read_model("sample.xml"); +ov::CompiledModel compileModel = core.compile_model(model, "MULTI:HDDL,GPU"); +//... +compileModel.set_property(ov::device::priorities("GPU,HDDL")); +// you can even exclude some device +compileModel.set_property(ov::device::priorities("GPU")); +//... +// and then return it back +compileModel.set_property(ov::device::priorities("GPU,HDDL")); +//but you cannot add new devices on the fly, the next line will trigger the following exception: +//[ ERROR ] [NOT_FOUND] You can only change device priorities but not add new devices with the Network's SetConfig(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES. +//CPU device was not in the original device list! +compileModel.set_property(ov::device::priorities("CPU,GPU,HDDL")); //! [part1] return 0; } diff --git a/docs/snippets/MULTI2.cpp b/docs/snippets/MULTI2.cpp index d42f42e10b0..674b23d7fd2 100644 --- a/docs/snippets/MULTI2.cpp +++ b/docs/snippets/MULTI2.cpp @@ -1,17 +1,16 @@ -#include +#include int main() { -using namespace InferenceEngine; //! 
[part2] - Core ie; - auto cnnNetwork = ie.ReadNetwork("sample.xml"); - std::string allDevices = "MULTI:"; - std::vector availableDevices = ie.GetAvailableDevices(); - for (auto && device : availableDevices) { - allDevices += device; - allDevices += ((device == availableDevices[availableDevices.size()-1]) ? "" : ","); - } - ExecutableNetwork exeNetwork = ie.LoadNetwork(cnnNetwork, allDevices, {}); +ov::Core core; +std::shared_ptr model = core.read_model("sample.xml"); +std::string allDevices = "MULTI:"; +std::vector availableDevices = core.get_available_devices(); +for (auto && device : availableDevices) { + allDevices += device; + allDevices += ((device == availableDevices[availableDevices.size()-1]) ? "" : ","); +} +ov::CompiledModel compileModel = core.compile_model(model, allDevices); //! [part2] return 0; } diff --git a/docs/snippets/MULTI3.cpp b/docs/snippets/MULTI3.cpp index 98de44b1e22..30361308c76 100644 --- a/docs/snippets/MULTI3.cpp +++ b/docs/snippets/MULTI3.cpp @@ -1,17 +1,18 @@ -#include +#include int main() { //! [part3] - InferenceEngine::Core ie; - auto cnnNetwork = ie.ReadNetwork("sample.xml"); - std::string allDevices = "MULTI:"; - std::vector myriadDevices = ie.GetMetric("MYRIAD", METRIC_KEY(AVAILABLE_DEVICES)); - for (size_t i = 0; i < myriadDevices.size(); ++i) { - allDevices += std::string("MYRIAD.") - + myriadDevices[i] - + std::string(i < (myriadDevices.size() -1) ? "," : ""); - } - InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(cnnNetwork, allDevices, {}); +ov::Core core; +std::shared_ptr model = core.read_model("sample.xml"); +std::string allDevices = "MULTI:"; +std::vector myriadDevices = core.get_property("MYRIAD", ov::available_devices); +for (size_t i = 0; i < myriadDevices.size(); ++i) { + allDevices += std::string("MYRIAD.") + + myriadDevices[i] + + std::string(i < (myriadDevices.size() -1) ? "," : ""); +} + +ov::CompiledModel compileModel = core.compile_model(model, allDevices); //! 
[part3] return 0; } diff --git a/docs/snippets/MULTI4.cpp b/docs/snippets/MULTI4.cpp index 40501250b23..b88f4fb6687 100644 --- a/docs/snippets/MULTI4.cpp +++ b/docs/snippets/MULTI4.cpp @@ -1,19 +1,24 @@ -#include +#include int main() { -const std::map hddl_config = { { InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES } }; -const std::map gpu_config = { { InferenceEngine::PluginConfigParams::KEY_PERF_COUNT, InferenceEngine::PluginConfigParams::YES } }; +ov::AnyMap hddl_config = {{ov::enable_profiling(true)}}; +ov::AnyMap gpu_config = {{ov::enable_profiling(true)}}; //! [part4] // configure the HDDL device first -InferenceEngine::Core ie; -InferenceEngine::CNNNetwork cnnNetwork = ie.ReadNetwork("sample.xml"); -ie.SetConfig(hddl_config, "HDDL"); -// configure the GPU device -ie.SetConfig(gpu_config, "GPU"); -// load the network to the multi-device, while specifying the configuration (devices along with priorities): -InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(cnnNetwork, "MULTI", {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES, "HDDL,GPU"}}); -// new metric allows to query the optimal number of requests: -uint32_t nireq = exeNetwork.GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)).as(); +ov::Core core; +std::shared_ptr model = core.read_model("sample.xml"); +core.set_property({ov::device::properties("HDDL", hddl_config), + ov::device::properties("GPU", gpu_config)}); + +// compile the modle on the multi-device, while specifying the configuration (devices along with priorities +// and the configuration of devices): +ov::CompiledModel compileModel = core.compile_model(model, "MULTI", + ov::device::priorities("HDDL,GPU"), + ov::device::properties("HDDL", hddl_config), + ov::device::properties("GPU", gpu_config)); + +// query the optimal number of requests: +uint32_t nireq = compileModel.get_property(ov::optimal_number_of_infer_requests); //! 
[part4] return 0; } diff --git a/docs/snippets/MULTI5.cpp b/docs/snippets/MULTI5.cpp index 20b3015f8de..10dfc996a9e 100644 --- a/docs/snippets/MULTI5.cpp +++ b/docs/snippets/MULTI5.cpp @@ -1,15 +1,15 @@ -#include +#include int main() { -std::string device_name = "MULTI:HDDL,GPU"; -const std::map< std::string, std::string > full_config = {}; +const ov::AnyMap full_config = {}; //! [part5] -InferenceEngine::Core ie; -InferenceEngine::CNNNetwork cnnNetwork = ie.ReadNetwork("sample.xml"); +ov::Core core; +std::string device_name = "MULTI:HDDL,GPU"; +std::shared_ptr model = core.read_model("sample.xml"); // 'device_name' can be "MULTI:HDDL,GPU" to configure the multi-device to use HDDL and GPU -InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(cnnNetwork, device_name, full_config); -// new metric allows to query the optimal number of requests: -uint32_t nireq = exeNetwork.GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)).as(); +ov::CompiledModel compileModel = core.compile_model(model, device_name, full_config); +// query the optimal number of requests: +uint32_t nireq = compileModel.get_property(ov::optimal_number_of_infer_requests); //! [part5] return 0; } diff --git a/docs/snippets/cpu/Bfloat16Inference0.cpp b/docs/snippets/cpu/Bfloat16Inference0.cpp new file mode 100644 index 00000000000..8df7ddaf3f7 --- /dev/null +++ b/docs/snippets/cpu/Bfloat16Inference0.cpp @@ -0,0 +1,9 @@ +#include + +int main() { +//! [part0] +ov::Core core; +auto cpuOptimizationCapabilities = core.get_property("CPU", ov::device::capabilities); +//! [part0] +return 0; +} diff --git a/docs/snippets/cpu/Bfloat16Inference1.cpp b/docs/snippets/cpu/Bfloat16Inference1.cpp new file mode 100644 index 00000000000..51850c6018d --- /dev/null +++ b/docs/snippets/cpu/Bfloat16Inference1.cpp @@ -0,0 +1,13 @@ +#include + +int main() { +using namespace InferenceEngine; +//! 
[part1] +ov::Core core; +auto network = core.read_model("sample.xml"); +auto exec_network = core.compile_model(network, "CPU"); +auto inference_precision = exec_network.get_property(ov::hint::inference_precision); +//! [part1] + +return 0; +} diff --git a/docs/snippets/cpu/Bfloat16Inference2.cpp b/docs/snippets/cpu/Bfloat16Inference2.cpp new file mode 100644 index 00000000000..c06a6491b89 --- /dev/null +++ b/docs/snippets/cpu/Bfloat16Inference2.cpp @@ -0,0 +1,11 @@ +#include + +int main() { +using namespace InferenceEngine; +//! [part2] +ov::Core core; +core.set_property("CPU", ov::hint::inference_precision(ov::element::f32)); +//! [part2] + +return 0; +} diff --git a/docs/snippets/cpu/compile_model.cpp b/docs/snippets/cpu/compile_model.cpp new file mode 100644 index 00000000000..be84b353cd7 --- /dev/null +++ b/docs/snippets/cpu/compile_model.cpp @@ -0,0 +1,20 @@ +#include + + +int main() { + { + //! [compile_model_default] + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "CPU"); + //! [compile_model_default] + } + + { + //! [compile_model_multi] + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "MULTI:CPU,GPU.0"); + //! [compile_model_multi] + } +} diff --git a/docs/snippets/cpu/dynamic_shape.cpp b/docs/snippets/cpu/dynamic_shape.cpp new file mode 100644 index 00000000000..74a0ac7637a --- /dev/null +++ b/docs/snippets/cpu/dynamic_shape.cpp @@ -0,0 +1,25 @@ +#include + + +int main() { + { + //! [defined_upper_bound] + ov::Core core; + auto model = core.read_model("model.xml"); + + model->reshape({{ov::Dimension(1, 10), ov::Dimension(1, 20), ov::Dimension(1, 30), ov::Dimension(1, 40)}}); + //! [defined_upper_bound] + } + + { + //! [static_shape] + ov::Core core; + auto model = core.read_model("model.xml"); + ov::Shape static_shape = {10, 20, 30, 40}; + + model->reshape(static_shape); + //! 
[static_shape] + } + + return 0; +} diff --git a/docs/snippets/dldt_optimization_guide9.cpp b/docs/snippets/dldt_optimization_guide9.cpp index dfd746c4b44..bdab20e7326 100644 --- a/docs/snippets/dldt_optimization_guide9.cpp +++ b/docs/snippets/dldt_optimization_guide9.cpp @@ -1,7 +1,6 @@ #include int main() { -using namespace InferenceEngine; //! [part9] while(true) { // capture frame diff --git a/docs/snippets/example_ngraph_utils.cpp b/docs/snippets/example_ngraph_utils.cpp index c6d32373551..6cf9faf21a4 100644 --- a/docs/snippets/example_ngraph_utils.cpp +++ b/docs/snippets/example_ngraph_utils.cpp @@ -20,115 +20,6 @@ #include // ! [ov:include] - -// ! [ov:create_simple_model] -std::shared_ptr create_simple_function() { - // This example shows how to create ov::Function - // - // Parameter--->Multiply--->Add--->Result - // Constant---' / - // Constant---' - - // Create opset8::Parameter operation with static shape - auto data = std::make_shared(ov::element::f32, ov::Shape{3, 1, 2}); - - auto mul_constant = ov::opset8::Constant::create(ov::element::f32, ov::Shape{1}, {1.5}); - auto mul = std::make_shared(data, mul_constant); - - auto add_constant = ov::opset8::Constant::create(ov::element::f32, ov::Shape{1}, {0.5}); - auto add = std::make_shared(mul, add_constant); - - // Create opset8::Result operation - auto res = std::make_shared(mul); - - // Create nGraph function - return std::make_shared(ov::ResultVector{res}, ov::ParameterVector{data}); -} -// ! [ov:create_simple_model] - -// ! 
[ov:create_advanced_model] -std::shared_ptr create_advanced_function() { - // Advanced example with multi output operation - // - // Parameter->Split---0-->Result - // | `--1-->Relu-->Result - // `----2-->Result - - auto data = std::make_shared(ov::element::f32, ov::Shape{1, 3, 64, 64}); - - // Create Constant for axis value - auto axis_const = ov::opset8::Constant::create(ov::element::i64, ov::Shape{}/*scalar shape*/, {1}); - - // Create opset8::Split operation that splits input to three slices across 1st dimension - auto split = std::make_shared(data, axis_const, 3); - - // Create opset8::Relu operation that takes 1st Split output as input - auto relu = std::make_shared(split->output(1)/*specify explicit output*/); - - // Results operations will be created automatically based on provided OutputVector - return std::make_shared(ov::OutputVector{split->output(0), relu, split->output(2)}, ov::ParameterVector{data}); -} -// ! [ov:create_advanced_model] - -void pattern_matcher_examples(std::shared_ptr node) { -{ -// ! [pattern:simple_example] -// Pattern example -auto input = std::make_shared(ov::element::i64, ov::Shape{1}); -auto shapeof = std::make_shared(input); - -// Create Matcher with Parameter->ShapeOf pattern -auto m = std::make_shared(shapeof, "MyPatternBasedTransformation"); -// ! [pattern:simple_example] - -// ! [pattern:callback_example] -ov::graph_rewrite_callback callback = [](ov::pass::pattern::Matcher& m) { - // Get root node - std::shared_ptr root_node = m.get_match_root(); - - // Get all nodes matched by pattern - ov::NodeVector nodes = m.get_matched_nodes(); - - // Transformation code - return false; -}; -// ! [pattern:callback_example] -} - -{ -// ! 
[pattern:label_example] -// Detect Multiply with arbitrary first input and second as Constant -// ov::pattern::op::Label - represent arbitrary input -auto input = ov::pass::pattern::any_input(); -auto value = ov::opset8::Constant::create(ov::element::f32, ov::Shape{1}, {0.5}); -auto mul = std::make_shared(input, value); -auto m = std::make_shared(mul, "MultiplyMatcher"); -// ! [pattern:label_example] -} - -{ -// ! [pattern:concat_example] -// Detect Concat operation with arbitrary number of inputs -auto concat = ov::pass::pattern::wrap_type(); -auto m = std::make_shared(concat, "ConcatMatcher"); -// ! [pattern:concat_example] -} - -{ -// ! [pattern:predicate_example] -// Detect Multiply->Add sequence where mul has exactly one consumer -auto mul = ov::pass::pattern::wrap_type(ov::pass::pattern::consumers_count(1)/*сheck consumers count*/); -auto add = ov::pass::pattern::wrap_type({mul, ov::pass::pattern::any_input()}); -auto m = std::make_shared(add, "MultiplyAddMatcher"); -// Matcher can be used to match pattern manually on given node -if (m->match(node->output(0))) { - // Successfully matched -} -// ! [pattern:predicate_example] -} - -} - bool ngraph_api_examples(std::shared_ptr node) { { // ! [ngraph:ports_example] @@ -154,16 +45,6 @@ auto consumers = output.get_target_inputs(); // ! [ngraph:ports_example] } -{ -// ! [ngraph:shape] -auto partial_shape = node->input(0).get_partial_shape(); // get zero input partial shape -if (partial_shape.is_dynamic() /* or !partial_shape.is_static() */) { - return false; -} -auto static_shape = partial_shape.get_shape(); -// ! [ngraph:shape] -} - { // ! [ngraph:shape_check] auto partial_shape = node->input(0).get_partial_shape(); // get zero input partial shape @@ -185,63 +66,6 @@ auto dim = partial_shape[1].get_length(); return true; } -// ! [ngraph:replace_node] -bool ngraph_replace_node(std::shared_ptr node) { - // Step 1. 
Verify that node has opset8::Negative type - auto neg = std::dynamic_pointer_cast(node); - if (!neg) { - return false; - } - - // Step 2. Create opset8::Multiply operation where the first input is negative operation input and second as Constant with -1 value - auto mul = std::make_shared(neg->input_value(0), - ov::opset8::Constant::create(neg->get_element_type(), ov::Shape{1}, {-1})); - - mul->set_friendly_name(neg->get_friendly_name()); - // TODO: Move to new API - ngraph::copy_runtime_info(neg, mul); - - // Step 3. Replace Negative operation with Multiply operation - ov::replace_node(neg, mul); - return true; - - // Step 4. Negative operation will be removed automatically because all consumers was moved to Multiply operation -} -// ! [ngraph:replace_node] - -// ! [ngraph:insert_node] -// Step 1. Lets suppose that we have a node with single output port and we want to insert additional operation new_node after it -void insert_example(std::shared_ptr node) { - // Get all consumers for node - auto consumers = node->output(0).get_target_inputs(); - - // Step 2. Create new node. Let it be opset1::Relu. - auto new_node = std::make_shared(node); - - // Step 3. Reconnect all consumers to new_node - for (auto input : consumers) { - input.replace_source_output(new_node); - } -} -// ! [ngraph:insert_node] - -// ! [ngraph:insert_node_with_copy] -void insert_example_with_copy(std::shared_ptr node) { - // Make a node copy - auto node_copy = node->clone_with_new_inputs(node->input_values()); - // Create new node - auto new_node = std::make_shared(node_copy); - ov::replace_node(node, new_node); -} -// ! [ngraph:insert_node_with_copy] - -void eliminate_example(std::shared_ptr node) { -// ! [ngraph:eliminate_node] -// Suppose we have a node that we want to remove -bool success = replace_output_update_name(node->output(0), node->input_value(0)); -// ! [ngraph:eliminate_node] -} - // ! 
[ov:serialize] void serialize_example(std::shared_ptr f) { ov::pass::Manager manager; diff --git a/docs/snippets/gna/configure.cpp b/docs/snippets/gna/configure.cpp new file mode 100644 index 00000000000..2738c511143 --- /dev/null +++ b/docs/snippets/gna/configure.cpp @@ -0,0 +1,17 @@ +// Copyright (C) 2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +//! [include] +#include +#include +//! [include] + +int main() { +const std::string model_path = "model.xml"; +//! [ov_gna_exec_mode_hw_with_sw_fback] +ov::Core core; +auto model = core.read_model(model_path); +auto compiled_model = core.compile_model(model, "GNA", + ov::intel_gna::execution_mode(ov::intel_gna::ExecutionMode::HW_WITH_SW_FBACK)); +//! [ov_gna_exec_mode_hw_with_sw_fback] +} \ No newline at end of file diff --git a/docs/snippets/gna/configure.py b/docs/snippets/gna/configure.py new file mode 100644 index 00000000000..6f0ba675665 --- /dev/null +++ b/docs/snippets/gna/configure.py @@ -0,0 +1,15 @@ +# Copyright (C) 2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +#! [import] +from openvino.runtime import Core +#! [import] + +model_path = "model.xml" + +#! [ov_gna_exec_mode_hw_with_sw_fback] +core = Core() +model = core.read_model(model=model_path) +compiled_model = core.compile_model(model, device_name="GNA", + config={ 'GNA_DEVICE_MODE' : 'GNA_HW_WITH_SW_FBACK'}) +#! [ov_gna_exec_mode_hw_with_sw_fback] \ No newline at end of file diff --git a/docs/snippets/gna/import_export.cpp b/docs/snippets/gna/import_export.cpp new file mode 100644 index 00000000000..4677364892c --- /dev/null +++ b/docs/snippets/gna/import_export.cpp @@ -0,0 +1,29 @@ +// Copyright (C) 2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +//! [include] +#include +#include +//! 
[include] + +int main() { +const std::string model_path = "model.xml"; +const std::string blob_path = "compiled_model.blob"; + +ov::Core core; +auto model = core.read_model(model_path); +auto compiled_model = core.compile_model(model, "GNA"); + +{ +//! [ov_gna_export] +std::ofstream ofs(blob_path, std::ios_base::binary | std::ios::out); +compiled_model.export_model(ofs); +//! [ov_gna_export] +} +{ +//! [ov_gna_import] +std::ifstream ifs(blob_path, std::ios_base::binary | std::ios_base::in); +auto compiled_model = core.import_model(ifs, "GNA"); +//! [ov_gna_import] +} +} \ No newline at end of file diff --git a/docs/snippets/gna/import_export.py b/docs/snippets/gna/import_export.py new file mode 100644 index 00000000000..8ecead1520d --- /dev/null +++ b/docs/snippets/gna/import_export.py @@ -0,0 +1,26 @@ +# Copyright (C) 2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +#! [import] +from openvino.runtime import Core +from io import BytesIO +#! [import] + +model_path = "model.xml" +blob_path = "compiled_model.blob" + +core = Core() +model = core.read_model(model=model_path) +compiled_model = core.compile_model(model, device_name="GNA") + +#! [ov_gna_export] +user_stream = compiled_model.export_model() +with open(blob_path, 'wb') as f: + f.write(user_stream) +#! [ov_gna_export] + +# [ov_gna_import] +with open(blob_path, 'rb') as f: + buf = BytesIO(f.read()) + compiled_model = core.import_model(buf, device_name="GNA") +# [ov_gna_import] \ No newline at end of file diff --git a/docs/snippets/gna/set_batch.cpp b/docs/snippets/gna/set_batch.cpp new file mode 100644 index 00000000000..822efa562d8 --- /dev/null +++ b/docs/snippets/gna/set_batch.cpp @@ -0,0 +1,29 @@ +// Copyright (C) 2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +//! [include] +#include +//! [include] + +int main() { +const std::string model_path = "model.xml"; +size_t batch_size = 8; + +//! [ov_gna_read_model] +ov::Core core; +auto model = core.read_model(model_path); +//! 
[ov_gna_read_model] + +//! [ov_gna_set_nc_layout] +ov::preprocess::PrePostProcessor ppp(model); +for (const auto& input : model->inputs()) { + auto& in = ppp.input(input.get_any_name()); + in.model().set_layout(ov::Layout("N?")); +} +model = ppp.build(); +//! [ov_gna_set_nc_layout] + +//! [ov_gna_set_batch_size] +ov::set_batch(model, batch_size); +//! [ov_gna_set_batch_size] +} \ No newline at end of file diff --git a/docs/snippets/gna/set_batch.py b/docs/snippets/gna/set_batch.py new file mode 100644 index 00000000000..6ae1cd2ab15 --- /dev/null +++ b/docs/snippets/gna/set_batch.py @@ -0,0 +1,27 @@ +# Copyright (C) 2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +#! [import] +from openvino.runtime import Core, set_batch +from openvino.preprocess import PrePostProcessor +#! [import] + +model_path = "model.xml" +batch_size = 8 + +#! [ov_gna_read_model] +core = Core() +model = core.read_model(model=model_path) +#! [ov_gna_read_model] + +#! [ov_gna_set_nc_layout] +ppp = PrePostProcessor(model) +for i in range(len(model.inputs)): + input_name = model.input(i).get_any_name() + ppp.input(i).model().set_layout("N?") +model = ppp.build() +#! [ov_gna_set_nc_layout] + +#! [ov_gna_set_batch_size] +set_batch(model, batch_size) +#! [ov_gna_set_batch_size] \ No newline at end of file diff --git a/docs/snippets/gpu/compile_model.cpp b/docs/snippets/gpu/compile_model.cpp new file mode 100644 index 00000000000..0cdb494989d --- /dev/null +++ b/docs/snippets/gpu/compile_model.cpp @@ -0,0 +1,54 @@ +#include +#include + + +int main() { +{ + //! [compile_model_default_gpu] + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "GPU"); + //! [compile_model_default_gpu] +} + +{ + //! [compile_model_gpu_with_id] + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "GPU.1"); + //! [compile_model_gpu_with_id] +} + +{ + //! 
[compile_model_gpu_with_id_and_tile] + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "GPU.1.0"); + //! [compile_model_gpu_with_id_and_tile] +} + +{ + //! [compile_model_multi] + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "MULTI:GPU.1,GPU.0"); + //! [compile_model_multi] +} + +{ + //! [compile_model_batch_plugin] + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "BATCH:GPU"); + //! [compile_model_batch_plugin] +} + +{ + //! [compile_model_auto_batch] + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)); + //! [compile_model_auto_batch] +} + return 0; +} diff --git a/docs/snippets/gpu/context_sharing.cpp b/docs/snippets/gpu/context_sharing.cpp new file mode 100644 index 00000000000..5b0c206692a --- /dev/null +++ b/docs/snippets/gpu/context_sharing.cpp @@ -0,0 +1,77 @@ +#include +#include + +cl::Context get_ocl_context(); // a function which returns cl context created on the app side + +int main() { +{ + //! [context_sharing_get_from_ov] + + // ... 
+ + // initialize the core and load the network + ov::Core core; + auto model = core.read_model("model.xml"); + auto compiled_model = core.compile_model(model, "GPU"); + auto infer_request = compiled_model.create_infer_request(); + + + // obtain the RemoteContext from the compiled model object and cast it to ClContext + auto gpu_context = compiled_model.get_context().as(); + // obtain the OpenCL context handle from the RemoteContext, + // get device info and create a queue + cl::Context cl_context = gpu_context; + cl::Device device = cl::Device(cl_context.getInfo()[0].get(), true); + cl_command_queue_properties props = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE; + cl::CommandQueue queue = cl::CommandQueue(cl_context, device, props); + + // create the OpenCL buffer within the obtained context + auto input = model->get_parameters().at(0); + auto input_size = ov::shape_size(input->get_shape()); + cl_int err; + cl::Buffer shared_buffer(cl_context, CL_MEM_READ_WRITE, input_size, NULL, &err); + // wrap the buffer into RemoteBlob + auto shared_blob = gpu_context.create_tensor(input->get_element_type(), input->get_shape(), shared_buffer); + + // ... + // execute user kernel + cl::Program program; + cl::Kernel kernel(program, "user_kernel"); + kernel.setArg(0, shared_buffer); + queue.enqueueNDRangeKernel(kernel, + cl::NDRange(0), + cl::NDRange(input_size), + cl::NDRange(1), + nullptr, + nullptr); + queue.finish(); + // ... + // pass results to the inference + infer_request.set_tensor(input, shared_blob); + infer_request.infer(); + //! [context_sharing_get_from_ov] +} +{ + + //! 
[context_sharing_user_handle] + cl::Context ctx = get_ocl_context(); + + ov::Core core; + auto model = core.read_model("model.xml"); + + // share the context with GPU plugin and compile ExecutableNetwork + auto remote_context = ov::intel_gpu::ocl::ClContext(core, ctx.get()); + auto exec_net_shared = core.compile_model(model, remote_context); + auto inf_req_shared = exec_net_shared.create_infer_request(); + + + // ... + // do OpenCL processing stuff + // ... + + // run the inference + inf_req_shared.infer(); + //! [context_sharing_user_handle] +} + return 0; +} diff --git a/docs/snippets/gpu/context_sharing_va.cpp b/docs/snippets/gpu/context_sharing_va.cpp new file mode 100644 index 00000000000..d0ed053ffa3 --- /dev/null +++ b/docs/snippets/gpu/context_sharing_va.cpp @@ -0,0 +1,57 @@ +#ifdef ENABLE_LIBVA +#include +#include +#include +#include + +VADisplay get_va_display(); +VASurfaceID decode_va_surface(); + +int main() { + // initialize the objects + ov::Core core; + auto model = core.read_model("model.xml"); + + + // ... + + + //! [context_sharing_va] + + // ... + + using namespace ov::preprocess; + auto p = PrePostProcessor(model); + p.input().tensor().set_element_type(ov::element::u8) + .set_color_format(ov::preprocess::ColorFormat::NV12_TWO_PLANES, {"y", "uv"}) + .set_memory_type(ov::intel_gpu::memory_type::surface); + p.input().preprocess().convert_color(ov::preprocess::ColorFormat::BGR); + p.input().model().set_layout("NCHW"); + model = p.build(); + + VADisplay disp = get_va_display(); + // create the shared context object + auto shared_va_context = ov::intel_gpu::ocl::VAContext(core, disp); + // compile model within a shared context + auto compiled_model = core.compile_model(model, shared_va_context); + + auto input = model->get_parameters().at(0); + size_t width = 1024; + size_t height = 768; + + // execute decoding and obtain decoded surface handle + VASurfaceID va_surface = decode_va_surface(); + // ... 
+ //wrap decoder output into RemoteBlobs and set it as inference input + auto nv12_blob = shared_va_context.create_tensor_nv12(height, width, va_surface); + + auto infer_request = compiled_model.create_infer_request(); + infer_request.set_tensor("y", nv12_blob.first); + infer_request.set_tensor("uv", nv12_blob.second); + infer_request.start_async(); + infer_request.wait(); + //! [context_sharing_va] + + return 0; +} +#endif // ENABLE_LIBVA diff --git a/docs/snippets/gpu/custom_kernels_api.cpp b/docs/snippets/gpu/custom_kernels_api.cpp new file mode 100644 index 00000000000..4cd5edf2f7d --- /dev/null +++ b/docs/snippets/gpu/custom_kernels_api.cpp @@ -0,0 +1,11 @@ +#include + +int main() { + //! [part0] + ov::Core core; + // Load GPU Extensions + core.set_property("GPU", {{ CONFIG_KEY(CONFIG_FILE), "" }}); + //! [part0] + + return 0; +} diff --git a/docs/snippets/gpu/dynamic_batch.cpp b/docs/snippets/gpu/dynamic_batch.cpp new file mode 100644 index 00000000000..ae221316a38 --- /dev/null +++ b/docs/snippets/gpu/dynamic_batch.cpp @@ -0,0 +1,34 @@ +#include + +int main() { +size_t C = 3; +size_t H = 224; +size_t W = 224; + +//! [dynamic_batch] + +// Read model +ov::Core core; +auto model = core.read_model("model.xml"); + +model->reshape({{ov::Dimension(1, 10), ov::Dimension(C), ov::Dimension(H), ov::Dimension(W)}}); // {1..10, C, H, W} + +// compile model and create infer request +auto compiled_model = core.compile_model(model, "GPU"); +auto infer_request = compiled_model.create_infer_request(); +auto input = model->get_parameters().at(0); + +// ... + +// create input tensor with specific batch size +ov::Tensor input_tensor(input->get_element_type(), {2, C, H, W}); + +// ... + +infer_request.set_tensor(input, input_tensor); +infer_request.infer(); + +//! 
[dynamic_batch] + +return 0; +} diff --git a/docs/snippets/gpu/preprocessing.cpp b/docs/snippets/gpu/preprocessing.cpp new file mode 100644 index 00000000000..3674759bc46 --- /dev/null +++ b/docs/snippets/gpu/preprocessing.cpp @@ -0,0 +1,54 @@ +#include +#include +#include +#include + +ov::intel_gpu::ocl::ClImage2DTensor get_y_tensor(); +ov::intel_gpu::ocl::ClImage2DTensor get_uv_tensor(); + +int main() { + ov::Core core; + auto model = core.read_model("model.xml"); + + //! [init_preproc] + using namespace ov::preprocess; + auto p = PrePostProcessor(model); + p.input().tensor().set_element_type(ov::element::u8) + .set_color_format(ov::preprocess::ColorFormat::NV12_TWO_PLANES, {"y", "uv"}) + .set_memory_type(ov::intel_gpu::memory_type::surface); + p.input().preprocess().convert_color(ov::preprocess::ColorFormat::BGR); + p.input().model().set_layout("NCHW"); + auto model_with_preproc = p.build(); + //! [init_preproc] + + auto compiled_model = core.compile_model(model, "GPU"); + auto context = compiled_model.get_context().as(); + auto input = model->get_parameters().at(0); + auto infer_request = compiled_model.create_infer_request(); + +{ + + //! [single_batch] + ov::intel_gpu::ocl::ClImage2DTensor y_tensor = get_y_tensor(); + ov::intel_gpu::ocl::ClImage2DTensor uv_tensor = get_uv_tensor(); + infer_request.set_tensor("y", y_tensor); + infer_request.set_tensor("uv", uv_tensor); + infer_request.infer(); + //! [single_batch] +} + +{ + auto y_tensor_0 = get_y_tensor(); + auto y_tensor_1 = get_y_tensor(); + auto uv_tensor_0 = get_uv_tensor(); + auto uv_tensor_1 = get_uv_tensor(); + //! [batched_case] + std::vector y_tensors = {y_tensor_0, y_tensor_1}; + std::vector uv_tensors = {uv_tensor_0, uv_tensor_1}; + infer_request.set_tensors("y", y_tensors); + infer_request.set_tensors("uv", uv_tensors); + infer_request.infer(); + //! 
[batched_case] +} + return 0; +} diff --git a/docs/snippets/gpu/queue_sharing.cpp b/docs/snippets/gpu/queue_sharing.cpp new file mode 100644 index 00000000000..95517af1466 --- /dev/null +++ b/docs/snippets/gpu/queue_sharing.cpp @@ -0,0 +1,79 @@ +#include +#include + + +cl::CommandQueue get_ocl_queue(); // a function which returns cl queue created on the app side +cl::Context get_ocl_context(); // a function which returns cl context created on the app side + +int main() { + //! [queue_sharing] + + // ... + + // initialize the core and read the model + ov::Core core; + auto model = core.read_model("model.xml"); + + // get opencl queue object + cl::CommandQueue queue = get_ocl_queue(); + cl::Context cl_context = get_ocl_context(); + + // share the queue with GPU plugin and compile model + auto remote_context = ov::intel_gpu::ocl::ClContext(core, queue.get()); + auto exec_net_shared = core.compile_model(model, remote_context); + + auto input = model->get_parameters().at(0); + auto input_size = ov::shape_size(input->get_shape()); + auto output = model->get_results().at(0); + auto output_size = ov::shape_size(output->get_shape()); + cl_int err; + + // create the OpenCL buffers within the context + cl::Buffer shared_in_buffer(cl_context, CL_MEM_READ_WRITE, input_size, NULL, &err); + cl::Buffer shared_out_buffer(cl_context, CL_MEM_READ_WRITE, output_size, NULL, &err); + // wrap in and out buffers into RemoteTensor and set them to infer request + auto shared_in_blob = remote_context.create_tensor(input->get_element_type(), input->get_shape(), shared_in_buffer); + auto shared_out_blob = remote_context.create_tensor(output->get_element_type(), output->get_shape(), shared_out_buffer); + auto infer_request = exec_net_shared.create_infer_request(); + infer_request.set_tensor(input, shared_in_blob); + infer_request.set_tensor(output, shared_out_blob); + + // ... 
+ // execute user kernel + cl::Program program; + cl::Kernel kernel_preproc(program, "user_kernel_preproc"); + kernel_preproc.setArg(0, shared_in_buffer); + queue.enqueueNDRangeKernel(kernel_preproc, + cl::NDRange(0), + cl::NDRange(input_size), + cl::NDRange(1), + nullptr, + nullptr); + // Blocking clFinish() call is not required, but this barrier is added to the queue to guarantee that user kernel is finished + // before any inference primitive is started + queue.enqueueBarrierWithWaitList(nullptr, nullptr); + // ... + + // pass results to the inference + // since the remote context is created with queue sharing, start_async() guarantees that scheduling is finished + infer_request.start_async(); + + // execute some postprocessing kernel. + // infer_request.wait() is not called, synchonization between inference and post-processing is done via + // enqueueBarrierWithWaitList call. + cl::Kernel kernel_postproc(program, "user_kernel_postproc"); + kernel_postproc.setArg(0, shared_out_buffer); + queue.enqueueBarrierWithWaitList(nullptr, nullptr); + queue.enqueueNDRangeKernel(kernel_postproc, + cl::NDRange(0), + cl::NDRange(output_size), + cl::NDRange(1), + nullptr, + nullptr); + + // Wait for pipeline completion + queue.finish(); + //! 
[queue_sharing] + + return 0; +} diff --git a/docs/snippets/gpu/remote_objects_creation.cpp b/docs/snippets/gpu/remote_objects_creation.cpp new file mode 100644 index 00000000000..70df2a654a5 --- /dev/null +++ b/docs/snippets/gpu/remote_objects_creation.cpp @@ -0,0 +1,154 @@ +#include +#include + +#include + +#ifdef WIN32 +#include +#elif defined(ENABLE_LIBVA) +#include +#endif + +void* allocate_usm_buffer(size_t size); +cl_mem allocate_cl_mem(size_t size); +cl_context get_cl_context(); +cl_command_queue get_cl_queue(); +cl::Buffer allocate_buffer(size_t size); +cl::Image2D allocate_image(size_t size); + + +#ifdef WIN32 +ID3D11Device* get_d3d_device(); +#elif defined(ENABLE_LIBVA) +VADisplay get_va_display(); +#endif + +int main() { + ov::Core core; + auto model = core.read_model("model.xml"); + auto input = model->get_parameters().at(0); + auto input_size = ov::shape_size(input->get_shape()); + + auto compiled_model = core.compile_model(model, "GPU"); + auto gpu_context = compiled_model.get_context().as(); + + auto in_element_type = input->get_element_type(); + auto in_shape = input->get_shape(); + +{ + //! [wrap_usm_pointer] + void* shared_buffer = allocate_usm_buffer(input_size); + auto remote_tensor = gpu_context.create_tensor(in_element_type, in_shape, shared_buffer); + //! [wrap_usm_pointer] +} + +{ + //! [wrap_cl_mem] + cl_mem shared_buffer = allocate_cl_mem(input_size); + auto remote_tensor = gpu_context.create_tensor(in_element_type, in_shape, shared_buffer); + //! [wrap_cl_mem] +} + +{ + //! [wrap_cl_buffer] + cl::Buffer shared_buffer = allocate_buffer(input_size); + auto remote_tensor = gpu_context.create_tensor(in_element_type, in_shape, shared_buffer); + //! [wrap_cl_buffer] +} + +{ + //! [wrap_cl_image] + cl::Image2D shared_buffer = allocate_image(input_size); + auto remote_tensor = gpu_context.create_tensor(in_element_type, in_shape, shared_buffer); + //! [wrap_cl_image] +} + +{ + //! 
[allocate_usm_device] + auto remote_tensor = gpu_context.create_usm_device_tensor(in_element_type, in_shape); + // Extract raw usm pointer from remote tensor + void* usm_ptr = remote_tensor.get(); + //! [allocate_usm_device] +} + +{ + //! [allocate_usm_host] + ov::intel_gpu::ocl::USMTensor remote_tensor = gpu_context.create_usm_host_tensor(in_element_type, in_shape); + // Extract raw usm pointer from remote tensor + void* usm_ptr = remote_tensor.get(); + //! [allocate_usm_host] +} + +{ + //! [allocate_cl_buffer] + ov::RemoteTensor remote_tensor = gpu_context.create_tensor(in_element_type, in_shape); + // Cast from base to derived class and extract ocl memory handle + auto buffer_tensor = remote_tensor.as(); + cl_mem handle = buffer_tensor.get(); + //! [allocate_cl_buffer] +} + +{ + size_t width = 1024; + size_t height = 768; + + size_t y_plane_size = width*height; + size_t uv_plane_size = width*height / 2; + + //! [wrap_nv12_surface] + cl::Image2D y_plane_surface = allocate_image(y_plane_size); + cl::Image2D uv_plane_surface = allocate_image(uv_plane_size); + auto remote_tensor = gpu_context.create_tensor_nv12(y_plane_surface, uv_plane_surface); + auto y_tensor = remote_tensor.first; + auto uv_tensor = remote_tensor.second; + //! [wrap_nv12_surface] +} + +{ + //! [context_from_cl_context] + cl_context ctx = get_cl_context(); + ov::intel_gpu::ocl::ClContext gpu_context(core, ctx); + //! [context_from_cl_context] +} + + +{ + //! [context_from_cl_queue] + cl_command_queue queue = get_cl_queue(); + ov::intel_gpu::ocl::ClContext gpu_context(core, queue); + //! [context_from_cl_queue] +} + +#ifdef WIN32 +{ + //! [context_from_d3d_device] + ID3D11Device* device = get_d3d_device(); + ov::intel_gpu::ocl::D3DContext gpu_context(core, device); + //! [context_from_d3d_device] +} +#elif defined(ENABLE_LIBVA) +{ + //! [context_from_va_display] + VADisplay display = get_va_display(); + ov::intel_gpu::ocl::VAContext gpu_context(core, display); + //! 
[context_from_va_display] +} +#endif +{ + //! [default_context_from_core] + auto gpu_context = core.get_default_context("GPU").as(); + // Extract ocl context handle from RemoteContext + cl_context context_handle = gpu_context.get(); + //! [default_context_from_core] +} + +{ + //! [default_context_from_model] + auto gpu_context = compiled_model.get_context().as(); + // Extract ocl context handle from RemoteContext + cl_context context_handle = gpu_context.get(); + //! [default_context_from_model] +} + + return 0; +} diff --git a/docs/snippets/ie_common.cpp b/docs/snippets/ie_common.cpp index 25cf78a0c43..a594e6e59de 100644 --- a/docs/snippets/ie_common.cpp +++ b/docs/snippets/ie_common.cpp @@ -13,10 +13,8 @@ int main() { InferenceEngine::CNNNetwork network = core.ReadNetwork("model.xml"); //! [ie:read_model] - //! [ie:get_inputs_outputs] InferenceEngine::InputsDataMap inputs = network.getInputsInfo(); InferenceEngine::OutputsDataMap outputs = network.getOutputsInfo(); - //! [ie:get_inputs_outputs] //! [ie:compile_model] InferenceEngine::ExecutableNetwork exec_network = core.LoadNetwork(network, "CPU"); @@ -29,7 +27,6 @@ int main() { //! [ie:get_input_tensor] InferenceEngine::Blob::Ptr input_blob1 = infer_request.GetBlob(inputs.begin()->first); // fill first blob - InferenceEngine::SizeVector dims1 = input_blob1->getTensorDesc().getDims(); InferenceEngine::MemoryBlob::Ptr minput1 = InferenceEngine::as(input_blob1); if (minput1) { // locked memory holder should be alive all time while access to its @@ -39,6 +36,7 @@ int main() { auto data = minputHolder.as::value_type*>(); // Fill data ... } + InferenceEngine::Blob::Ptr input_blob2 = infer_request.GetBlob("data2"); // fill first blob InferenceEngine::MemoryBlob::Ptr minput2 = InferenceEngine::as(input_blob2); @@ -56,6 +54,35 @@ int main() { infer_request.Infer(); //! [ie:inference] + //! 
[ie:start_async_and_wait] + // NOTE: For demonstration purposes we are trying to set callback + // which restarts inference inside one more time, so two inferences happen here + + // Start inference without blocking current thread + auto restart_once = true; + infer_request.SetCompletionCallback>( + [&, restart_once](InferenceEngine::InferRequest request, InferenceEngine::StatusCode status) mutable { + if (status != InferenceEngine::OK) { + // Process error code + } else { + // Extract inference result + InferenceEngine::Blob::Ptr output_blob = request.GetBlob(outputs.begin()->first); + // Restart inference if needed + if (restart_once) { + request.StartAsync(); + restart_once = false; + } + } + }); + infer_request.StartAsync(); + // Get inference status immediately + InferenceEngine::StatusCode status = infer_request.Wait(InferenceEngine::InferRequest::STATUS_ONLY); + // Wait for 1 millisecond + status = infer_request.Wait(1); + // Wait for inference completion + infer_request.Wait(InferenceEngine::InferRequest::RESULT_READY); + //! [ie:start_async_and_wait] + //! [ie:get_output_tensor] InferenceEngine::Blob::Ptr output_blob = infer_request.GetBlob(outputs.begin()->first); InferenceEngine::MemoryBlob::Ptr moutput = InferenceEngine::as(output_blob); @@ -70,4 +97,4 @@ int main() { } //! [ie:get_output_tensor] return 0; -} +} \ No newline at end of file diff --git a/docs/snippets/ov_auto_batching.cpp b/docs/snippets/ov_auto_batching.cpp new file mode 100644 index 00000000000..4d74ee5be57 --- /dev/null +++ b/docs/snippets/ov_auto_batching.cpp @@ -0,0 +1,54 @@ +#include + +int main() { + ov::Core core; + auto model = core.read_model("sample.xml"); +{ + +//! [compile_model] +auto compiled_model = core.compile_model(model, "GPU", + ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)); +//! [compile_model] +} + +{ +//! 
[compile_model_no_auto_batching] +// disabling the automatic batching +// leaving intact other configurations options that the device selects for the 'throughput' hint +auto compiled_model = core.compile_model(model, "GPU", + ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), + ov::hint::allow_auto_batching(false)); +//! [compile_model_no_auto_batching] +} + +{ +//! [query_optimal_num_requests] +// when the batch size is automatically selected by the implementation +// it is important to query/create and run the sufficient #requests +auto compiled_model = core.compile_model(model, "GPU", + ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)); +auto num_requests = compiled_model.get_property(ov::optimal_number_of_infer_requests); +//! [query_optimal_num_requests] +} + +{ +//! [hint_num_requests] +// limiting the available parallel slack for the 'throughput' hint via the ov::hint::num_requests +// so that certain parameters (like selected batch size) are automatically accommodated accordingly +auto compiled_model = core.compile_model(model, "GPU", + ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), + ov::hint::num_requests(4)); +//! [hint_num_requests] +} + +//! [hint_plus_low_level] +{ + // high-level performance hints are compatible with low-level device-specific settings +auto compiled_model = core.compile_model(model, "CPU", + ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), + ov::inference_num_threads(4)); +} +//! 
[hint_plus_low_level] + + return 0; +} diff --git a/docs/snippets/ov_auto_batching.py b/docs/snippets/ov_auto_batching.py new file mode 100644 index 00000000000..cecf2f846a7 --- /dev/null +++ b/docs/snippets/ov_auto_batching.py @@ -0,0 +1,41 @@ +from openvino.runtime import Core + +core = Core() +model = core.read_model(model="sample.xml") + +# [compile_model] +config = {"PERFORMANCE_HINT": "THROUGHPUT"} +compiled_model = core.compile_model(model, "GPU", config) +# [compile_model] + +# [compile_model_no_auto_batching] +# disabling the automatic batching +# leaving intact other configurations options that the device selects for the 'throughput' hint +config = {"PERFORMANCE_HINT": "THROUGHPUT", + "ALLOW_AUTO_BATCHING": False} +compiled_model = core.compile_model(model, "GPU", config) +# [compile_model_no_auto_batching] + +# [query_optimal_num_requests] +# when the batch size is automatically selected by the implementation +# it is important to query/create and run the sufficient requests +config = {"PERFORMANCE_HINT": "THROUGHPUT"} +compiled_model = core.compile_model(model, "GPU", config) +num_requests = compiled_model.get_property("OPTIMAL_NUMBER_OF_INFER_REQUESTS") +# [query_optimal_num_requests] + +# [hint_num_requests] +config = {"PERFORMANCE_HINT": "THROUGHPUT", + "PERFORMANCE_HINT_NUM_REQUESTS": "4"} +# limiting the available parallel slack for the 'throughput' +# so that certain parameters (like selected batch size) are automatically accommodated accordingly +compiled_model = core.compile_model(model, "GPU", config) +# [hint_num_requests] + +# [hint_plus_low_level] +config = {"PERFORMANCE_HINT": "THROUGHPUT", + "INFERENCE_NUM_THREADS": "4"} +# high-level performance hints are compatible with +# low-level device-specific settings +compiled_model = core.compile_model(model, "CPU", config) +# [hint_plus_low_level] \ No newline at end of file diff --git a/docs/snippets/ov_caching.cpp 
b/docs/snippets/ov_caching.cpp new file mode 100644 index 00000000000..1e104c25b28 --- /dev/null +++ b/docs/snippets/ov_caching.cpp @@ -0,0 +1,72 @@ +#include + +void part0() { + std::string modelPath = "/tmp/myModel.xml"; + std::string device = "GNA"; + ov::AnyMap config; +//! [ov:caching:part0] +ov::Core core; // Step 1: create ov::Core object +core.set_property(ov::cache_dir("/path/to/cache/dir")); // Step 1b: Enable caching +auto model = core.read_model(modelPath); // Step 2: Read Model +//... // Step 3: Prepare inputs/outputs +//... // Step 4: Set device configuration +auto compiled = core.compile_model(model, device, config); // Step 5: LoadNetwork +//! [ov:caching:part0] + if (!compiled) { + throw std::runtime_error("error"); + } +} + +void part1() { + std::string modelPath = "/tmp/myModel.xml"; + std::string device = "GNA"; + ov::AnyMap config; +//! [ov:caching:part1] +ov::Core core; // Step 1: create ov::Core object +auto compiled = core.compile_model(modelPath, device, config); // Step 2: Compile model by file path +//! [ov:caching:part1] + if (!compiled) { + throw std::runtime_error("error"); + } +} + +void part2() { + std::string modelPath = "/tmp/myModel.xml"; + std::string device = "GNA"; + ov::AnyMap config; +//! [ov:caching:part2] +ov::Core core; // Step 1: create ov::Core object +core.set_property(ov::cache_dir("/path/to/cache/dir")); // Step 1b: Enable caching +auto compiled = core.compile_model(modelPath, device, config); // Step 2: Compile model by file path +//! [ov:caching:part2] + if (!compiled) { + throw std::runtime_error("error"); + } +} + +void part3() { + std::string deviceName = "GNA"; + ov::AnyMap config; + ov::Core core; +//! 
[ov:caching:part3] +// Get list of supported device capabilities +std::vector caps = core.get_property(deviceName, ov::device::capabilities); + +// Find 'EXPORT_IMPORT' capability in supported capabilities +bool cachingSupported = std::find(caps.begin(), caps.end(), ov::device::capability::EXPORT_IMPORT) != caps.end(); +//! [ov:caching:part3] + if (!cachingSupported) { + throw std::runtime_error("GNA should support model caching"); + } +} + +int main() { + try { + part0(); + part1(); + part2(); + part3(); + } catch (...) { + } + return 0; +} \ No newline at end of file diff --git a/docs/snippets/ov_caching.py b/docs/snippets/ov_caching.py new file mode 100644 index 00000000000..3aa400fe1fa --- /dev/null +++ b/docs/snippets/ov_caching.py @@ -0,0 +1,36 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +from openvino.runtime import Core + +device_name = 'GNA' +xml_path = '/tmp/myModel.xml' +# ! [ov:caching:part0] +core = Core() +core.set_property({'CACHE_DIR': '/path/to/cache/dir'}) +model = core.read_model(model=xml_path) +compiled_model = core.compile_model(model=model, device_name=device_name) +# ! [ov:caching:part0] + +assert compiled_model + +# ! [ov:caching:part1] +core = Core() +compiled_model = core.compile_model(model_path=xml_path, device_name=device_name) +# ! [ov:caching:part1] + +assert compiled_model + +# ! [ov:caching:part2] +core = Core() +core.set_property({'CACHE_DIR': '/path/to/cache/dir'}) +compiled_model = core.compile_model(model_path=xml_path, device_name=device_name) +# ! [ov:caching:part2] + +assert compiled_model + +# ! [ov:caching:part3] +# Find 'EXPORT_IMPORT' capability in supported capabilities +caching_supported = 'EXPORT_IMPORT' in core.get_property(device_name, 'OPTIMIZATION_CAPABILITIES') +# ! 
[ov:caching:part3] diff --git a/docs/snippets/ov_common.cpp b/docs/snippets/ov_common.cpp index ca3c8f83fa1..f42cb78e646 100644 --- a/docs/snippets/ov_common.cpp +++ b/docs/snippets/ov_common.cpp @@ -62,16 +62,11 @@ int main() { //! [ov_api_2_0:create_core] //! [ov_api_2_0:read_model] - std::shared_ptr network = core.read_model("model.xml"); + std::shared_ptr model = core.read_model("model.xml"); //! [ov_api_2_0:read_model] - //! [ov_api_2_0:get_inputs_outputs] - std::vector> inputs = network->inputs(); - std::vector> outputs = network->outputs(); - //! [ov_api_2_0:get_inputs_outputs] - //! [ov_api_2_0:compile_model] - ov::CompiledModel compiled_model = core.compile_model(network, "CPU"); + ov::CompiledModel compiled_model = core.compile_model(model, "CPU"); //! [ov_api_2_0:compile_model] //! [ov_api_2_0:create_infer_request] @@ -79,11 +74,41 @@ int main() { //! [ov_api_2_0:create_infer_request] inputs_aligned(infer_request); + //! [ov_api_2_0:inference] infer_request.infer(); //! [ov_api_2_0:inference] + //! [ov_api_2_0:start_async_and_wait] + // NOTE: For demonstration purposes we are trying to set callback + // which restarts inference inside one more time, so two inferences happen here + + auto restart_once = true; + infer_request.set_callback([&, restart_once] (std::exception_ptr exception_ptr) mutable { + if (exception_ptr) { + // process exception or rethrow it. 
+ std::rethrow_exception(exception_ptr); + } else { + // Extract inference result + ov::Tensor output_tensor = infer_request.get_output_tensor(); + // Restart inference if needed + if (restart_once) { + infer_request.start_async(); + restart_once = false; + } + } + }); + // Start inference without blocking current thread + infer_request.start_async(); + // Get inference status immediately + bool status = infer_request.wait_for(std::chrono::milliseconds{0}); + // Wait for one millisecond + status = infer_request.wait_for(std::chrono::milliseconds{1}); + // Wait for inference completion + infer_request.wait(); + //! [ov_api_2_0:start_async_and_wait] + outputs_aligned(infer_request); return 0; -} +} \ No newline at end of file diff --git a/docs/snippets/ov_dynamic_shapes.cpp b/docs/snippets/ov_dynamic_shapes.cpp new file mode 100644 index 00000000000..41270671def --- /dev/null +++ b/docs/snippets/ov_dynamic_shapes.cpp @@ -0,0 +1,157 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#include +#include + +void reshape_with_dynamics() { +{ +//! [ov_dynamic_shapes:reshape_undefined] +ov::Core core; +auto model = core.read_model("model.xml"); + +// Set one static dimension (= 1) and another dynamic dimension (= Dimension()) +model->reshape({{1, ov::Dimension()}}); // {1,?} + +// The same as above +model->reshape({{1, -1}}); // {1,?} + +// Or set both dimensions as dynamic if both are going to be changed dynamically +model->reshape({{ov::Dimension(), ov::Dimension()}}); // {?,?} + +// The same as above +model->reshape({{-1, -1}}); // {?,?} +//! [ov_dynamic_shapes:reshape_undefined] +//! 
[ov_dynamic_shapes:reshape_bounds] +// Both dimensions are dynamic, first has a size within 1..10 and the second has a size within 8..512 +model->reshape({{ov::Dimension(1, 10), ov::Dimension(8, 512)}}); // {1..10,8..512} + +// Both dimensions are dynamic, first doesn't have bounds, the second is in the range of 8..512 +model->reshape({{-1, ov::Dimension(8, 512)}}); // {?,8..512} +//! [ov_dynamic_shapes:reshape_bounds] +} +{ +ov::Core core; +auto model = core.read_model("model.xml"); +//! [ov_dynamic_shapes:print_dynamic] +// Print output partial shape +std::cout << model->output().get_partial_shape() << "\n"; + +// Print input partial shape +std::cout << model->input().get_partial_shape() << "\n"; +//! [ov_dynamic_shapes:print_dynamic] +} +{ +ov::Core core; +//! [ov_dynamic_shapes:detect_dynamic] +auto model = core.read_model("model.xml"); + +if (model->input(0).get_partial_shape().is_dynamic()) { + // input is dynamic +} + +if (model->output(0).get_partial_shape().is_dynamic()) { + // output is dynamic +} + +if (model->output(0).get_partial_shape()[1].is_dynamic()) { + // 1-st dimension of output is dynamic +} +//! [ov_dynamic_shapes:detect_dynamic] +} +} + +void set_tensor() { +ov::Core core; +auto model = core.read_model("model.xml"); +auto executable = core.compile_model(model); +auto infer_request = executable.create_infer_request(); +//! [ov_dynamic_shapes:set_input_tensor] +// The first inference call + +// Create tensor compatible with the model input +// Shape {1, 128} is compatible with any reshape statements made in previous examples +auto input_tensor_1 = ov::Tensor(model->input().get_element_type(), {1, 128}); +// ... 
write values to input_tensor_1 + +// Set the tensor as an input for the infer request +infer_request.set_input_tensor(input_tensor_1); + +// Do the inference +infer_request.infer(); + +// Retrieve a tensor representing the output data +ov::Tensor output_tensor = infer_request.get_output_tensor(); + +// For dynamic models output shape usually depends on input shape, +// that means shape of output tensor is initialized after the first inference only +// and has to be queried after every infer request +auto output_shape_1 = output_tensor.get_shape(); + +// Take a pointer of an appropriate type to tensor data and read elements according to the shape +// Assuming model output is f32 data type +auto data_1 = output_tensor.data(); +// ... read values + +// The second inference call, repeat steps: + +// Create another tensor (if the previous one cannot be utilized) +// Notice, the shape is different from input_tensor_1 +auto input_tensor_2 = ov::Tensor(model->input().get_element_type(), {1, 200}); +// ... write values to input_tensor_2 + +infer_request.set_input_tensor(input_tensor_2); + +infer_request.infer(); + +// No need to call infer_request.get_output_tensor() again +// output_tensor queried after the first inference call above is valid here. +// But it may not be true for the memory underneath as shape changed, so re-take a pointer: +auto data_2 = output_tensor.data(); + +// and new shape as well +auto output_shape_2 = output_tensor.get_shape(); + +// ... read values in data_2 according to the shape output_shape_2 +//! [ov_dynamic_shapes:set_input_tensor] +} + +void get_tensor() { +ov::Core core; +auto model = core.read_model("model.xml"); +auto executable = core.compile_model(model); +auto infer_request = executable.create_infer_request(); +//! 
[ov_dynamic_shapes:get_input_tensor] +// The first inference call + +// Get the tensor; shape is not initialized +auto input_tensor = infer_request.get_input_tensor(); + +// Set shape is required +input_tensor.set_shape({1, 128}); +// ... write values to input_tensor + +infer_request.infer(); +ov::Tensor output_tensor = infer_request.get_output_tensor(); +auto output_shape_1 = output_tensor.get_shape(); +auto data_1 = output_tensor.data(); +// ... read values + +// The second inference call, repeat steps: + +// Set a new shape, may reallocate tensor memory +input_tensor.set_shape({1, 200}); +// ... write values to input_tensor memory + +infer_request.infer(); +auto data_2 = output_tensor.data(); +auto output_shape_2 = output_tensor.get_shape(); +// ... read values in data_2 according to the shape output_shape_2 +//! [ov_dynamic_shapes:get_input_tensor] +} + +int main() { +reshape_with_dynamics(); +get_tensor(); +set_tensor(); +} diff --git a/docs/snippets/ov_dynamic_shapes.py b/docs/snippets/ov_dynamic_shapes.py new file mode 100644 index 00000000000..06cd11c39fe --- /dev/null +++ b/docs/snippets/ov_dynamic_shapes.py @@ -0,0 +1,139 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import numpy as np +#! [import] +import openvino.runtime as ov +#! [import] + +#! [reshape_undefined] +core = ov.Core() +model = core.read_model("model.xml") + +# Set one static dimension (= 1) and another dynamic dimension (= Dimension()) +model.reshape([1, ov.Dimension()]) + +# The same as above +model.reshape([1, -1]) + +# The same as above +model.reshape("1, ?") + +# Or set both dimensions as dynamic if both are going to be changed dynamically +model.reshape([ov.Dimension(), ov.Dimension()]) + +# The same as above +model.reshape([-1, -1]) + +# The same as above +model.reshape("?, ?") +#! [reshape_undefined] + +#! 
[reshape_bounds] +# Both dimensions are dynamic, first has a size within 1..10 and the second has a size within 8..512 +model.reshape([ov.Dimension(1, 10), ov.Dimension(8, 512)]) + +# The same as above +model.reshape([(1, 10), (8, 512)]) + +# The same as above +model.reshape("1..10, 8..512") + +# Both dimensions are dynamic, first doesn't have bounds, the second is in the range of 8..512 +model.reshape([-1, (8, 512)]) +#! [reshape_bounds] + +model = core.read_model("model.xml") + +#! [print_dynamic] +# Print output partial shape +print(model.output().partial_shape) + +# Print input partial shape +print(model.input().partial_shape) +#! [print_dynamic] + +#! [detect_dynamic] +model = core.read_model("model.xml") + +if model.input(0).partial_shape.is_dynamic(): + # input is dynamic + pass + +if model.output(0).partial_shape.is_dynamic(): + # output is dynamic + pass + +if model.output(0).partial_shape[1].is_dynamic(): + # 1-st dimension of output is dynamic + pass +#! [detect_dynamic] + +executable = core.compile_model(model) +infer_request = executable.create_infer_request() + +#! [set_input_tensor] +# The first inference call + +# Create tensor compatible to the model input +# Shape {1, 128} is compatible with any reshape statements made in previous examples +input_tensor1 = ov.Tensor(model.input().element_type, [1, 128]) +# ... 
write values to input_tensor_1 + +# Set the tensor as an input for the infer request +infer_request.set_input_tensor(input_tensor1) + +# Do the inference +infer_request.infer() + +# Or pass a tensor in infer to set the tensor as a model input and make the inference +infer_request.infer([input_tensor1]) + +# Or pass the numpy array to set inputs of the infer request +input_data = np.ones(shape=[1, 128]) +infer_request.infer([input_data]) + +# Retrieve a tensor representing the output data +output_tensor = infer_request.get_output_tensor() + +# Copy data from tensor to numpy array +data1 = output_tensor.data[:] + +# The second inference call, repeat steps: + +# Create another tensor (if the previous one cannot be utilized) +# Notice, the shape is different from input_tensor_1 +input_tensor2 = ov.Tensor(model.input().element_type, [1, 200]) +# ... write values to input_tensor_2 + +infer_request.infer([input_tensor2]) + +# No need to call infer_request.get_output_tensor() again +# output_tensor queried after the first inference call above is valid here. +# But it may not be true for the memory underneath as shape changed, so re-take an output data: +data2 = output_tensor.data[:] +#! [set_input_tensor] + +infer_request = executable.create_infer_request() + +#! [get_input_tensor] +# Get the tensor, shape is not initialized +input_tensor = infer_request.get_input_tensor() + +# Set shape is required +input_tensor.shape = [1, 128] +# ... write values to input_tensor + +infer_request.infer() +output_tensor = infer_request.get_output_tensor() +data1 = output_tensor.data[:] + +# The second inference call, repeat steps: + +# Set a new shape, may reallocate tensor memory +input_tensor.shape = [1, 200] +# ... write values to input_tensor + +infer_request.infer() +data2 = output_tensor.data[:] +#! 
[get_input_tensor] diff --git a/docs/snippets/ov_extensions.cpp b/docs/snippets/ov_extensions.cpp new file mode 100644 index 00000000000..0abab9d3bfa --- /dev/null +++ b/docs/snippets/ov_extensions.cpp @@ -0,0 +1,26 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#include +#include +#include + +int main() { +{ +//! [add_extension] +ov::Core core; +// Use operation type to add operation extension +core.add_extension(); +// or you can add operation extension to this method +core.add_extension(ov::OpExtension()); +//! [add_extension] +} +{ +//! [add_extension_lib] +ov::Core core; +// Load extensions library to ov::Core +core.add_extension("openvino_template_extension.so"); +//! [add_extension_lib] +} +return 0; +} diff --git a/docs/snippets/ov_extensions.py b/docs/snippets/ov_extensions.py new file mode 100644 index 00000000000..4f53700c746 --- /dev/null +++ b/docs/snippets/ov_extensions.py @@ -0,0 +1,15 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +import openvino.runtime as ov + +#! [add_extension] +# Not implemented +#! [add_extension] + +#! [add_extension_lib] +core = ov.Core() +# Load extensions library to ov::Core +core.add_extension("openvino_template_extension.so") +#! [add_extension_lib] diff --git a/docs/snippets/ov_hetero.cpp b/docs/snippets/ov_hetero.cpp new file mode 100644 index 00000000000..791340afff5 --- /dev/null +++ b/docs/snippets/ov_hetero.cpp @@ -0,0 +1,57 @@ +#include + +int main() { +ov::Core core; +auto model = core.read_model("sample.xml"); +//! [set_manual_affinities] +for (auto && op : model->get_ops()) { + op->get_rt_info()["affinity"] = "CPU"; +} +//! [set_manual_affinities] + +//! 
[fix_automatic_affinities] +// This example demonstrates how to perform default affinity initialization and then +// correct affinity manually for some layers +const std::string device = "HETERO:GPU,CPU"; + +// query_model result contains mapping of supported operations to devices +auto supported_ops = core.query_model(model, device); + +// update default affinities manually for specific operations +supported_ops["operation_name"] = "CPU"; + +// set affinities to a model +for (auto&& node : model->get_ops()) { + auto& affinity = supported_ops[node->get_friendly_name()]; + // Store affinity mapping using op runtime information + node->get_rt_info()["affinity"] = affinity; +} + +// load model with manually set affinities +auto compiled_model = core.compile_model(model, device); +//! [fix_automatic_affinities] + +{ +//! [compile_model] +auto compiled_model = core.compile_model(model, "HETERO:GPU,CPU"); +// or with ov::device::priorities with multiple args +compiled_model = core.compile_model(model, "HETERO", ov::device::priorities("GPU", "CPU")); +// or with ov::device::priorities with a single argument +compiled_model = core.compile_model(model, "HETERO", ov::device::priorities("GPU,CPU")); +//! [compile_model] +} + +{ +//! [configure_fallback_devices] +auto compiled_model = core.compile_model(model, "HETERO", + // GPU with fallback to CPU + ov::device::priorities("GPU", "CPU"), + // profiling is enabled only for GPU + ov::device::properties("GPU", ov::enable_profiling(true)), + // FP32 inference precision only for CPU + ov::device::properties("CPU", ov::hint::inference_precision(ov::element::f32)) +); +//! [configure_fallback_devices] +} +return 0; +} diff --git a/docs/snippets/ov_hetero.py b/docs/snippets/ov_hetero.py new file mode 100644 index 00000000000..2d5bbd2d454 --- /dev/null +++ b/docs/snippets/ov_hetero.py @@ -0,0 +1,43 @@ +import openvino.runtime as ov + +core = ov.Core() +model = core.read_model("sample.xml") + +#! 
[set_manual_affinities] +for op in model.get_ops(): + rt_info = op.get_rt_info() + rt_info["affinity"] = "CPU" +#! [set_manual_affinities] + +#! [fix_automatic_affinities] +# This example demonstrates how to perform default affinity initialization and then +# correct affinity manually for some layers +device = "HETERO:GPU,CPU" + +# query_model result contains mapping of supported operations to devices +supported_ops = core.query_model(model, device) + +# update default affinities manually for specific operations +supported_ops["operation_name"] = "CPU" + +# set affinities to a model +for node in model.get_ops(): + affinity = supported_ops[node.get_friendly_name()] + node.get_rt_info()["affinity"] = affinity + +# load model with manually set affinities +compiled_model = core.compile_model(model, device) +#! [fix_automatic_affinities] + +#! [compile_model] +compiled_model = core.compile_model(model, device_name="HETERO:GPU,CPU") +# device priorities via configuration property +compiled_model = core.compile_model(model, device_name="HETERO", config={"MULTI_DEVICE_PRIORITIES": "GPU,CPU"}) +#! [compile_model] + +#! [configure_fallback_devices] +core.set_property("HETERO", {"MULTI_DEVICE_PRIORITIES": "GPU,CPU"}) +core.set_property("GPU", {"PERF_COUNT": "YES"}) +core.set_property("CPU", {"INFERENCE_PRECISION_HINT": "f32"}) +compiled_model = core.compile_model(model=model, device_name="HETERO") +#! [configure_fallback_devices] diff --git a/docs/snippets/ov_infer_request.cpp b/docs/snippets/ov_infer_request.cpp new file mode 100644 index 00000000000..42be537252e --- /dev/null +++ b/docs/snippets/ov_infer_request.cpp @@ -0,0 +1,112 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +// ! [ov:include] +#include +// ! [ov:include] + +int main() { +ov::Core core; +std::shared_ptr model; +ov::CompiledModel compiled_model; + +//! [create_infer_request] +auto infer_request = compiled_model.create_infer_request(); +//! 
[create_infer_request] + +//! [sync_infer] +infer_request.infer(); +//! [sync_infer] + +//! [async_infer] +infer_request.start_async(); +//! [async_infer] + +//! [wait] +infer_request.wait(); +//! [wait] + +//! [wait_for] +infer_request.wait_for(std::chrono::milliseconds(10)); +//! [wait_for] + +//! [set_callback] +infer_request.set_callback([&](std::exception_ptr ex_ptr) { + if (!ex_ptr) { + // all done. Output data can be processed. + // You can fill the input data and run inference one more time: + infer_request.start_async(); + } else { + // Something wrong, you can analyze exception_ptr + } +}); +//! [set_callback] + +//! [cancel] +infer_request.cancel(); +//! [cancel] + +{ +//! [get_set_one_tensor] +auto input_tensor = infer_request.get_input_tensor(); +auto output_tensor = infer_request.get_output_tensor(); +//! [get_set_one_tensor] +} + +{ +//! [get_set_index_tensor] +auto input_tensor = infer_request.get_input_tensor(0); +auto output_tensor = infer_request.get_output_tensor(1); +//! [get_set_index_tensor] +} + +//! [get_set_tensor] +auto tensor1 = infer_request.get_tensor("tensor_name1"); +ov::Tensor tensor2; +infer_request.set_tensor("tensor_name2", tensor2); +//! [get_set_tensor] + +{ +//! [get_set_tensor_by_port] +auto input_port = model->input(0); +auto output_port = model->output("tensor_name"); +ov::Tensor input_tensor; +infer_request.set_tensor(input_port, input_tensor); +auto output_tensor = infer_request.get_tensor(output_port); +//! [get_set_tensor_by_port] +} + +auto infer_request1 = compiled_model.create_infer_request(); +auto infer_request2 = compiled_model.create_infer_request(); + +//! [cascade_models] +auto output = infer_request1.get_output_tensor(0); +infer_request2.set_input_tensor(0, output); +//! [cascade_models] + +//! 
[roi_tensor] +/** input_tensor points to input of a previous network and + cropROI contains coordinates of output bounding box **/ +ov::Tensor input_tensor(ov::element::f32, ov::Shape({1, 3, 20, 20})); +ov::Coordinate begin({0, 0, 0, 0}); +ov::Coordinate end({1, 2, 3, 3}); +//... + +/** roi_tensor uses shared memory of input_tensor and describes cropROI + according to its coordinates **/ +ov::Tensor roi_tensor(input_tensor, begin, end); +infer_request2.set_tensor("input_name", roi_tensor); +//! [roi_tensor] + +{ +//! [remote_tensor] +ov::RemoteContext context = core.get_default_context("GPU"); +auto input_port = compiled_model.input("tensor_name"); +ov::RemoteTensor remote_tensor = context.create_tensor(input_port.get_element_type(), input_port.get_shape()); +infer_request.set_tensor(input_port, remote_tensor); +//! [remote_tensor] +} + +return 0; +} diff --git a/docs/snippets/ov_infer_request.py b/docs/snippets/ov_infer_request.py new file mode 100644 index 00000000000..c18ea6316f2 --- /dev/null +++ b/docs/snippets/ov_infer_request.py @@ -0,0 +1,97 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import numpy as np +#! [import] +import openvino.runtime as ov +#! [import] + +core = ov.Core() +model = core.read_model("model.xml") +compiled_model = core.compile_model(model, "AUTO") + +#! [create_infer_request] +infer_request = compiled_model.create_infer_request() +#! [create_infer_request] + +#! [sync_infer] +infer_request.infer() +#! [sync_infer] + +#! [async_infer] +infer_request.start_async() +#! [async_infer] + +#! [wait] +infer_request.wait() +#! [wait] + +#! [wait_for] +infer_request.wait_for(10) +#! [wait_for] + +#! [set_callback] +def callback(request, userdata): + request.start_async() + +infer_request.set_callback(callback) +#! [set_callback] + +#! [cancel] +infer_request.cancel() +#! [cancel] + +#! 
[get_set_one_tensor] +input_tensor = infer_request.get_input_tensor() +output_tensor = infer_request.get_output_tensor() +#! [get_set_one_tensor] + +#! [get_set_index_tensor] +input_tensor = infer_request.get_input_tensor(0) +output_tensor = infer_request.get_output_tensor(1) +#! [get_set_index_tensor] + +#! [get_set_name_tensor] +input_tensor = infer_request.get_tensor("input_name") +output_tensor = infer_request.get_tensor("output_name") +#! [get_set_name_tensor] + +#! [get_set_tensor] +tensor1 = infer_request.get_tensor("tensor_name1") +tensor2 = ov.Tensor() +infer_request.set_tensor("tensor_name2", tensor2) +#! [get_set_tensor] + +#! [get_set_tensor_by_port] +input_port = model.input(0) +output_port = model.input("tensor_name") +input_tensor = ov.Tensor() +infer_request.set_tensor(input_port, input_tensor) +output_tensor = infer_request.get_tensor(output_port) +#! [get_set_tensor_by_port] + +infer_request1 = compiled_model.create_infer_request() +infer_request2 = compiled_model.create_infer_request() + +#! [cascade_models] +output = infer_request1.get_output_tensor(0) +infer_request2.set_input_tensor(0, output) +#! [cascade_models] + +#! [roi_tensor] +# input_tensor points to input of a previous network and +# cropROI contains coordinates of output bounding box **/ +input_tensor = ov.Tensor(type=ov.Type.f32, shape=ov.Shape([1, 3, 20, 20])) +begin = [0, 0, 0, 0] +end = [1, 2, 3, 3] +# ... + +# roi_tensor uses shared memory of input_tensor and describes cropROI +# according to its coordinates **/ +roi_tensor = ov.Tensor(input_tensor, begin, end) +infer_request2.set_tensor("input_name", roi_tensor) +#! [roi_tensor] + +#! [remote_tensor] +# NOT SUPPORTED +#! 
[remote_tensor] diff --git a/docs/snippets/ov_layout.cpp b/docs/snippets/ov_layout.cpp new file mode 100644 index 00000000000..44f1e392f98 --- /dev/null +++ b/docs/snippets/ov_layout.cpp @@ -0,0 +1,58 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#include + +int main() { + ov::Layout layout; +//! [ov:layout:simple] +layout = ov::Layout("NHWC"); +//! [ov:layout:simple] + +//! [ov:layout:complex] +// Each dimension has name separated by comma, layout is wrapped with square brackets +layout = ov::Layout("[time,temperature,humidity]"); +//! [ov:layout:complex] + +//! [ov:layout:partially_defined] +// First dimension is batch, 4th is 'channels'. Others are not important for us +layout = ov::Layout("N??C"); +// Or the same using advanced syntax +layout = ov::Layout("[n,?,?,c]"); +//! [ov:layout:partially_defined] + +//! [ov:layout:dynamic] +// First dimension is 'batch' others are whatever +layout = ov::Layout("N..."); + +// Second dimension is 'channels' others are whatever +layout = ov::Layout("?C..."); + +// Last dimension is 'channels' others are whatever +layout = ov::Layout("...C"); +//! [ov:layout:dynamic] + +//! [ov:layout:predefined] +// returns 0 for batch +ov::layout::batch_idx("NCDHW"); + +// returns 1 for channels +ov::layout::channels_idx("NCDHW"); + +// returns 2 for depth +ov::layout::depth_idx("NCDHW"); + +// returns -2 for height +ov::layout::height_idx("...HW"); + +// returns -1 for width +ov::layout::width_idx("...HW"); +//! [ov:layout:predefined] + +//! [ov:layout:dump] +layout = ov::Layout("NCHW"); +std::cout << layout.to_string(); // prints [N,C,H,W] +//! [ov:layout:dump] + + return 0; +} diff --git a/docs/snippets/ov_layout.py b/docs/snippets/ov_layout.py new file mode 100644 index 00000000000..689937b6c87 --- /dev/null +++ b/docs/snippets/ov_layout.py @@ -0,0 +1,54 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +# ! 
[ov:layout:simple] +from openvino.runtime import Layout +layout = Layout('NCHW') +# ! [ov:layout:simple] +# ! [ov:layout:complex] +# Each dimension has name separated by comma +# Layout is wrapped with square brackets +layout = Layout('[time,temperature,humidity]') +# ! [ov:layout:complex] +# ! [ov:layout:partially_defined] +# First dimension is batch, 4th is 'channels'. +# Others are not important for us +layout = Layout('N??C') + +# Or the same using advanced syntax +layout = Layout('[n,?,?,c]') +# ! [ov:layout:partially_defined] +# ! [ov:layout:dynamic] +# First dimension is 'batch' others are whatever +layout = Layout('N...') + +# Second dimension is 'channels' others are whatever +layout = Layout('?C...') + +# Last dimension is 'channels' others are whatever +layout = Layout('...C') +# ! [ov:layout:dynamic] + +# ! [ov:layout:predefined] +from openvino.runtime import layout_helpers +# returns 0 for batch +layout_helpers.batch_idx(Layout('NCDHW')) + +# returns 1 for channels +layout_helpers.channels_idx(Layout('NCDHW')) + +# returns 2 for depth +layout_helpers.depth_idx(Layout('NCDHW')) + +# returns -2 for height +layout_helpers.height_idx(Layout('...HW')) + +# returns -1 for width +layout_helpers.width_idx(Layout('...HW')) +# ! [ov:layout:predefined] + +# ! [ov:layout:dump] +layout = Layout('NCHW') +print(layout) # prints [N,C,H,W] +# ! [ov:layout:dump] diff --git a/docs/snippets/ov_model_snippets.cpp b/docs/snippets/ov_model_snippets.cpp new file mode 100644 index 00000000000..12a8c732fc3 --- /dev/null +++ b/docs/snippets/ov_model_snippets.cpp @@ -0,0 +1,337 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +// ! [ov:include] +#include +#include +// ! [ov:include] + +#include +#include +#include +#include +#include +#include +#include +#include + + +// ! 
[ov:create_simple_model] +std::shared_ptr create_simple_model() { + // This example shows how to create ov::Model + // + // Parameter--->Multiply--->Add--->Result + // Constant---' / + // Constant---' + + // Create opset8::Parameter operation with static shape + auto data = std::make_shared(ov::element::f32, ov::Shape{3, 1, 2}); + + auto mul_constant = ov::opset8::Constant::create(ov::element::f32, ov::Shape{1}, {1.5}); + auto mul = std::make_shared(data, mul_constant); + + auto add_constant = ov::opset8::Constant::create(ov::element::f32, ov::Shape{1}, {0.5}); + auto add = std::make_shared(mul, add_constant); + + // Create opset8::Result operation + auto res = std::make_shared(mul); + + // Create nGraph function + return std::make_shared(ov::ResultVector{res}, ov::ParameterVector{data}); +} +// ! [ov:create_simple_model] + +// ! [ov:create_advanced_model] +std::shared_ptr create_advanced_model() { + // Advanced example with multi output operation + // + // Parameter->Split---0-->Result + // | `--1-->Relu-->Result + // `----2-->Result + + auto data = std::make_shared(ov::element::f32, ov::Shape{1, 3, 64, 64}); + + // Create Constant for axis value + auto axis_const = ov::opset8::Constant::create(ov::element::i64, ov::Shape{} /*scalar shape*/, {1}); + + // Create opset8::Split operation that splits input to three slices across 1st dimension + auto split = std::make_shared(data, axis_const, 3); + + // Create opset8::Relu operation that takes 1st Split output as input + auto relu = std::make_shared(split->output(1) /*specify explicit output*/); + + // Results operations will be created automatically based on provided OutputVector + return std::make_shared(ov::OutputVector{split->output(0), relu, split->output(2)}, + ov::ParameterVector{data}); +} +// ! [ov:create_advanced_model] + +void ov_api_examples() { + std::shared_ptr node = std::make_shared(ov::element::f32, ov::PartialShape{ov::Dimension::dynamic(), 3, 64, 64}); + + // ! 
[ov:partial_shape] + ov::Shape static_shape; + ov::PartialShape partial_shape = node->output(0).get_partial_shape(); // get zero output partial shape + if (!partial_shape.is_dynamic() /* or partial_shape.is_static() */) { + static_shape = partial_shape.get_shape(); + } + // ! [ov:partial_shape] +} + +// ! [ov:serialize] +void serialize_example(const std::shared_ptr& f) { + // Need include: + // * openvino/pass/manager.hpp + // * openvino/pass/serialize.hpp + ov::pass::Manager manager; + + // Serialize ov::Model to IR + manager.register_pass("/path/to/file/model.xml", "/path/to/file/model.bin"); + + manager.run_passes(f); +} +// ! [ov:serialize] + +// ! [ov:visualize] +void visualize_example(const std::shared_ptr& m) { + // Need include: + // * openvino/pass/manager.hpp + // * openvino/pass/visualize_tree.hpp + ov::pass::Manager manager; + + // Serialize ov::Model to before.svg file before transformation + manager.register_pass("image.svg"); + + manager.run_passes(m); +} +// ! [ov:visualize] + +void model_inputs() { +std::shared_ptr model; +//! [all_inputs_ouputs] +/* Take information about all topology inputs */ +auto inputs = model->inputs(); +/* Take information about all topology outputs */ +auto outputs = model->outputs(); +//! [all_inputs_ouputs] +} + +void pattern_matcher_examples(std::shared_ptr node) { +{ +// ! [pattern:simple_example] +// Pattern example +auto input = std::make_shared(ov::element::i64, ov::Shape{1}); +auto shapeof = std::make_shared(input); + +// Create Matcher with Parameter->ShapeOf pattern +auto m = std::make_shared(shapeof, "MyPatternBasedTransformation"); +// ! [pattern:simple_example] + +// ! [pattern:callback_example] +ov::graph_rewrite_callback callback = [](ov::pass::pattern::Matcher& m) { + // Get root node + std::shared_ptr root_node = m.get_match_root(); + + // Get all nodes matched by pattern + ov::NodeVector nodes = m.get_matched_nodes(); + + // Transformation code + return false; +}; +// ! 
[pattern:callback_example] +} + +{ +// ! [pattern:label_example] +// Detect Multiply with arbitrary first input and second as Constant +// ov::pattern::op::Label - represent arbitrary input +auto input = ov::pass::pattern::any_input(); +auto value = ov::opset8::Constant::create(ov::element::f32, ov::Shape{1}, {0.5}); +auto mul = std::make_shared(input, value); +auto m = std::make_shared(mul, "MultiplyMatcher"); +// ! [pattern:label_example] +} + +{ +// ! [pattern:concat_example] +// Detect Concat operation with arbitrary number of inputs +auto concat = ov::pass::pattern::wrap_type(); +auto m = std::make_shared(concat, "ConcatMatcher"); +// ! [pattern:concat_example] +} + +{ +// ! [pattern:predicate_example] +// Detect Multiply->Add sequence where mul has exactly one consumer +auto mul = ov::pass::pattern::wrap_type(ov::pass::pattern::consumers_count(1)/*сheck consumers count*/); +auto add = ov::pass::pattern::wrap_type({mul, ov::pass::pattern::any_input()}); +auto m = std::make_shared(add, "MultiplyAddMatcher"); +// Matcher can be used to match pattern manually on given node +if (m->match(node->output(0))) { + // Successfully matched +} +// ! [pattern:predicate_example] +} + +} + +bool openvino_api_examples(std::shared_ptr node) { +{ +// ! [ov:ports_example] +// Let's suppose that node is opset8::Convolution operation +// as we know opset8::Convolution has two input ports (data, weights) and one output port +ov::Input data = node->input(0); +ov::Input weights = node->input(1); +ov::Output output = node->output(0); + +// Getting shape and type +auto pshape = data.get_partial_shape(); +auto el_type = data.get_element_type(); + +// Getting parent for input port +ov::Output parent_output; +parent_output = data.get_source_output(); + +// Another short way to get partent for output port +parent_output = node->input_value(0); + +// Getting all consumers for output port +auto consumers = output.get_target_inputs(); +// ! [ov:ports_example] +} + +{ +// ! 
[ngraph:shape_check] +auto partial_shape = node->input(0).get_partial_shape(); // get zero input partial shape + +// Check that input shape rank is static +if (!partial_shape.rank().is_static()) { + return false; +} +auto rank_size = partial_shape.rank().get_length(); + +// Check that second dimension is not dynamic +if (rank_size < 2 || partial_shape[1].is_dynamic()) { + return false; +} +auto dim = partial_shape[1].get_length(); +// ! [ngraph:shape_check] +} + +return true; +} + +// ! [ov:replace_node] +bool ov_replace_node(std::shared_ptr node) { + // Step 1. Verify that node has opset8::Negative type + auto neg = std::dynamic_pointer_cast(node); + if (!neg) { + return false; + } + + // Step 2. Create opset8::Multiply operation where the first input is negative operation input and second as Constant with -1 value + auto mul = std::make_shared(neg->input_value(0), + ov::opset8::Constant::create(neg->get_element_type(), ov::Shape{1}, {-1})); + + mul->set_friendly_name(neg->get_friendly_name()); + ov::copy_runtime_info(neg, mul); + + // Step 3. Replace Negative operation with Multiply operation + ov::replace_node(neg, mul); + return true; + + // Step 4. Negative operation will be removed automatically because all consumers was moved to Multiply operation +} +// ! [ov:replace_node] + +bool ov_manual_replace_node(std::shared_ptr node) { +auto neg = std::dynamic_pointer_cast(node); +if (!neg) { + return false; +} + +auto mul = std::make_shared(neg->input_value(0), + ov::opset8::Constant::create(neg->get_element_type(), ov::Shape{1}, {-1})); + +mul->set_friendly_name(neg->get_friendly_name()); +ov::copy_runtime_info(neg, mul); + +// ! [ov:manual_replace] +// All neg->output(0) consumers will be moved to mul->output(0) port +neg->output(0).replace(mul->output(0)); +// ! [ov:manual_replace] +return true; +} + +// ! [ov:insert_node] +// Step 1. 
Lets suppose that we have a node with single output port and we want to insert additional operation new_node after it +void insert_example(std::shared_ptr node) { + // Get all consumers for node + auto consumers = node->output(0).get_target_inputs(); + + // Step 2. Create new node. Let it be opset8::Relu. + auto new_node = std::make_shared(node); + + // Step 3. Reconnect all consumers to new_node + for (auto input : consumers) { + input.replace_source_output(new_node); + } +} +// ! [ov:insert_node] + +// ! [ov:insert_node_with_copy] +void insert_example_with_copy(std::shared_ptr node) { + // Make a node copy + auto node_copy = node->clone_with_new_inputs(node->input_values()); + // Create new node + auto new_node = std::make_shared(node_copy); + ov::replace_node(node, new_node); +} +// ! [ov:insert_node_with_copy] + +void eliminate_example(std::shared_ptr node) { +// ! [ov:eliminate_node] +// Suppose we have a node that we want to remove +bool success = ov::replace_output_update_name(node->output(0), node->input_value(0)); +// ! [ov:eliminate_node] +} + +void replace_friendly_name() { +auto div = std::make_shared(); +// ! [ov:replace_friendly_name] +// Replace Div operation with Power and Multiply sub-graph and set original friendly name to Multiply operation +auto pow = std::make_shared(div->input(1).get_source_output(), + ov::op::v0::Constant::create(div->get_input_element_type(1), ov::Shape{1}, {-1})); +auto mul = std::make_shared(div->input(0).get_source_output(), pow); +mul->set_friendly_name(div->get_friendly_name()); +ngraph::replace_node(div, mul); +// ! [ov:replace_friendly_name] +} + +void constant_subgraph() { +// ! 
[ov:constant_subgraph] +// After ConstantFolding pass Power will be replaced with Constant +auto input = std::make_shared(ov::element::f32, ov::Shape{1}); +auto pow = std::make_shared(ov::opset8::Constant::create(ov::element::f32, ov::Shape{1}, {2}), + ov::opset8::Constant::create(ov::element::f32, ov::Shape{1}, {3})); +auto mul = std::make_shared(input /* not constant input */, pow); +// ! [ov:constant_subgraph] +} + +void copy_runtime_info_snippet() { +std::shared_ptr transpose, reshape, div, pow, mul, conv, bias, conv_fused, a, b, c, e, f; +// ! [ov:copy_runtime_info] +// Replace Transpose with Reshape operation (1:1) +ov::copy_runtime_info(transpose, reshape); + +// Replace Div operation with Power and Multiply sub-graph (1:N) +ov::copy_runtime_info(div, {pow, mul}); + +// Fuse Convolution with Add operation (N:1) +ov::copy_runtime_info({conv, bias}, {conv_fused}); + +// Any other transformation that replaces one sub-graph with another sub-graph (N:M) +ov::copy_runtime_info({a, b, c}, {e, f}); +// ! [ov:copy_runtime_info] +} diff --git a/docs/snippets/ov_model_snippets.py b/docs/snippets/ov_model_snippets.py new file mode 100644 index 00000000000..575ab07e80e --- /dev/null +++ b/docs/snippets/ov_model_snippets.py @@ -0,0 +1,88 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import numpy as np +#! [import] +import openvino.runtime as ov +#! [import] +import openvino.runtime.passes as passes + +# ! [ov:create_simple_model] +def create_simple_model(): + # This example shows how to create ov::Function + # + # Parameter--->Multiply--->Add--->Result + # Constant---' / + # Constant---' + data = ov.opset8.parameter([3, 1, 2], ov.Type.f32) + mul_constant = ov.opset8.constant([1.5], ov.Type.f32) + mul = ov.opset8.multiply(data, mul_constant) + add_constant = ov.opset8.constant([0.5], ov.Type.f32) + add = ov.opset8.add(mul, add_constant) + res = ov.opset8.result(add) + return ov.Model([res], [data], "model") +# ! 
[ov:create_simple_model] + +# ! [ov:create_advanced_model] +def create_advanced_model(): + # Advanced example with multi output operation + # + # Parameter->Split---0-->Result + # | `--1-->Relu-->Result + # `----2-->Result + data = ov.opset8.parameter(ov.Shape([1, 3, 64, 64]), ov.Type.f32) + # Create Constant for axis value + axis_const = ov.opset8.constant(ov.Type.i64, ov.Shape({}), [1]) + + # Create opset8::Split operation that splits input to three slices across 1st dimension + split = ov.opset8.split(data, axis_const, 3) + + # Create opset8::Relu operation that takes 1st Split output as input + relu = ov.opset8.relu(split.output(1)) + + # Results operations will be created automatically based on provided OutputVector + return ov.Model([split.output(0), relu, split.output(2)], [data], "model") +# ! [ov:create_advanced_model] + +def ov_api_examples(): + # Doesn't work + # node = ov.opset8.parameter(ov.PartialShape([ov.Dimension.dynamic(), 3, 64, 64]), np.float32) + node = ov.opset8.parameter(ov.PartialShape([ov.Dimension.dynamic(), ov.Dimension(3), ov.Dimension(64), ov.Dimension(64)]), np.float32) + + # it doesn't work: + # static_shape = ov.Shape() + # ! [ov:partial_shape] + partial_shape = node.output(0).get_partial_shape() # get zero output partial shape + if not partial_shape.is_dynamic: # or partial_shape.is_static + static_shape = partial_shape.get_shape() + # ! [ov:partial_shape] + +# ! [ov:serialize] +def serialize_example(m : ov.Model): + # Need import: + # * import openvino.runtime.passes as passes + pass_manager = passes.Manager() + pass_manager.register_pass(pass_name="Serialize", xml_path='model.xml', bin_path='model.bin') + pass_manager.run_passes(m) +# ! [ov:serialize] + +# ! [ov:visualize] +def visualize_example(m : ov.Model): + # Need import: + # * import openvino.runtime.passes as passes + pass_manager = passes.Manager() + pass_manager.register_pass(pass_name="VisualTree", file_name='image.svg') + pass_manager.run_passes(m) +# ! 
[ov:visualize] + +def model_inputs_outputs(model : ov.Model): + #! [all_inputs_ouputs] + inputs = model.inputs + outputs = model.outputs + #! [all_inputs_ouputs] + + +if __name__ == '__main__': + ov_api_examples() + create_simple_model() + create_advanced_model() diff --git a/docs/snippets/ov_multi.py b/docs/snippets/ov_multi.py new file mode 100644 index 00000000000..6b9629b9e4d --- /dev/null +++ b/docs/snippets/ov_multi.py @@ -0,0 +1,72 @@ +import sys +from openvino.runtime import Core +model_path = "/openvino_CI_CD/result/install_pkg/tests/test_model_zoo/core/models/ir/add_abc.xml" +path_to_model = "/openvino_CI_CD/result/install_pkg/tests/test_model_zoo/core/models/ir/add_abc.xml" +def Option_1(): +#! [Option_1] + core = Core() + + # Read a network in IR or ONNX format + model = core.read_model(model_path) + compiled_model = core.compile_model(model=model, device_name="MULTI:CPU,GPU") +#! [Option_1] + +def Option_2(): +#! [Option_2] + core = Core() + + # Read a network in IR or ONNX format + model = core.read_model(model_path) + core.set_property(device_name="MULTI", properties={"MULTI_DEVICE_PRIORITIES":"HDDL,GPU"}) + # Change priorities + core.set_property(device_name="MULTI", properties={"MULTI_DEVICE_PRIORITIES":"GPU,HDDL"}) + core.set_property(device_name="MULTI", properties={"MULTI_DEVICE_PRIORITIES":"GPU"}) + core.set_property(device_name="MULTI", properties={"MULTI_DEVICE_PRIORITIES":"HDDL,GPU"}) + core.set_property(device_name="MULTI", properties={"MULTI_DEVICE_PRIORITIES":"CPU,HDDL,GPU"}) +#! [Option_2] + +def available_devices_1(): +#! [available_devices_1] + all_devices = "MULTI:" + core = Core() + model = core.read_model(model_path) + all_devices += ",".join(core.available_devices) + compiled_model = core.compile_model(model=model, device_name=all_devices) +#! [available_devices_1] + +def available_devices_2(): +#! 
[available_devices_2] + match_list = [] + all_devices = "MULTI:" + dev_match_str = "MYRIAD" + core = Core() + model = core.read_model(model_path) + for d in core.available_devices: + if dev_match_str in d: + match_list.append(d) + all_devices += ",".join(match_list) + compiled_model = core.compile_model(model=model, device_name=all_devices) +#! [available_devices_2] + +def set_property(): +#! [set_property] + core = Core() + cpu_config = {} + gpu_config = {} + model = core.read_model(model_path) + core.set_property(device_name="CPU", properties=cpu_config) + core.set_property(device_name="GPU", properties=gpu_config) + compiled_model = core.compile_model(model=model, device_name="MULTI:GPU,CPU") + # Query the optimal number of requests + nireq = compiled_model.get_property("OPTIMAL_NUMBER_OF_INFER_REQUESTS") +#! [set_property] + +def main(): + Option_1() + Option_2() + available_devices_1() + available_devices_2() + set_property() + +if __name__ == '__main__': + sys.exit(main()) diff --git a/docs/snippets/ov_preprocessing.cpp b/docs/snippets/ov_preprocessing.cpp new file mode 100644 index 00000000000..5504deeca07 --- /dev/null +++ b/docs/snippets/ov_preprocessing.cpp @@ -0,0 +1,216 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#include +#include +#include + +void ppp_input_1(ov::preprocess::PrePostProcessor& ppp) { +//! [ov:preprocess:input_1] +ppp.input() // no index/name is needed if model has one input + .preprocess().scale(50.f); + +ppp.output() // same for output + .postprocess().convert_element_type(ov::element::u8); +//! [ov:preprocess:input_1] + +//! [ov:preprocess:mean_scale] +ppp.input("input").preprocess().mean(128).scale(127); +//! [ov:preprocess:mean_scale] + +//! 
[ov:preprocess:mean_scale_array] +// Suppose model's shape is {1, 3, 224, 224} +ppp.input("input").model().set_layout("NCHW"); // N=1, C=3, H=224, W=224 +// Mean/Scale has 3 values which matches with C=3 +ppp.input("input").preprocess() + .mean({103.94, 116.78, 123.68}).scale({57.21, 57.45, 57.73}); +//! [ov:preprocess:mean_scale_array] + +//! [ov:preprocess:convert_element_type] +// First define data type for your tensor +ppp.input("input").tensor().set_element_type(ov::element::u8); + +// Then define preprocessing step +ppp.input("input").preprocess().convert_element_type(ov::element::f32); + +// If conversion is needed to `model's` element type, 'f32' can be omitted +ppp.input("input").preprocess().convert_element_type(); +//! [ov:preprocess:convert_element_type] + +//! [ov:preprocess:convert_layout] +// First define layout for your tensor +ppp.input("input").tensor().set_layout("NHWC"); + +// Then define layout of model +ppp.input("input").model().set_layout("NCHW"); + +std::cout << ppp; // Will print 'implicit layout conversion step' +//! [ov:preprocess:convert_layout] + +//! [ov:preprocess:convert_layout_2] +ppp.input("input").tensor().set_shape({1, 480, 640, 3}); +// Model expects shape {1, 3, 480, 640} +ppp.input("input").preprocess().convert_layout({0, 3, 1, 2}); +// 0 -> 0; 3 -> 1; 1 -> 2; 2 -> 3 +//! [ov:preprocess:convert_layout_2] + +//! [ov:preprocess:resize_1] +ppp.input("input").tensor().set_shape({1, 3, 960, 1280}); +ppp.input("input").model().set_layout("??HW"); +ppp.input("input").preprocess().resize(ov::preprocess::ResizeAlgorithm::RESIZE_LINEAR, 480, 640); +//! [ov:preprocess:resize_1] + +//! [ov:preprocess:resize_2] +ppp.input("input").tensor().set_shape({1, 3, 960, 1280}); +ppp.input("input").model().set_layout("??HW"); // Model accepts {1, 3, 480, 640} shape +// Resize to model's dimension +ppp.input("input").preprocess().resize(ov::preprocess::ResizeAlgorithm::RESIZE_LINEAR); +//! [ov:preprocess:resize_2] + +//! 
[ov:preprocess:convert_color_1] +ppp.input("input").tensor().set_color_format(ov::preprocess::ColorFormat::BGR); +ppp.input("input").preprocess().convert_color(ov::preprocess::ColorFormat::RGB); +//! [ov:preprocess:convert_color_1] + +//! [ov:preprocess:convert_color_2] +// This will split original `input` to 2 separate inputs: `input/y' and 'input/uv' +ppp.input("input").tensor().set_color_format(ov::preprocess::ColorFormat::NV12_TWO_PLANES); +ppp.input("input").preprocess().convert_color(ov::preprocess::ColorFormat::RGB); +std::cout << ppp; // Dump preprocessing steps to see what will happen +//! [ov:preprocess:convert_color_2] +} + +void ppp_input_2(ov::preprocess::PrePostProcessor& ppp) { + //! [ov:preprocess:input_index] + auto &input_1 = ppp.input(1); // Gets 2nd input in a model + auto &output_1 = ppp.output(2); // Get output with index=2 (3rd one) in a model + //! [ov:preprocess:input_index] +} + +void ppp_input_name(ov::preprocess::PrePostProcessor& ppp) { + //! [ov:preprocess:input_name] + auto &input_image = ppp.input("image"); + auto &output_result = ppp.output("result"); + //! [ov:preprocess:input_name] +} + +int main() { + std::string model_path; + std::string input_name; + //! [ov:preprocess:create] + ov::Core core; + std::shared_ptr model = core.read_model(model_path); + ov::preprocess::PrePostProcessor ppp(model); + //! [ov:preprocess:create] + + //! [ov:preprocess:tensor] + ov::preprocess::InputInfo& input = ppp.input(input_name); + input.tensor() + .set_element_type(ov::element::u8) + .set_shape({1, 480, 640, 3}) + .set_layout("NHWC") + .set_color_format(ov::preprocess::ColorFormat::BGR); + //! [ov:preprocess:tensor] + //! [ov:preprocess:model] + // `model's input` already `knows` it's shape and data type, no need to specify them here + input.model().set_layout("NCHW"); + //! [ov:preprocess:model] + //! 
[ov:preprocess:steps] + input.preprocess() + .convert_element_type(ov::element::f32) + .convert_color(ov::preprocess::ColorFormat::RGB) + .resize(ov::preprocess::ResizeAlgorithm::RESIZE_LINEAR) + .mean({100.5, 101, 101.5}) + .scale({50., 51., 52.}); + // Not needed, such conversion will be added implicitly + // .convert_layout("NCHW"); + //! [ov:preprocess:steps] + //! [ov:preprocess:custom] + ppp.input("input_image").preprocess() + .custom([](const ov::Output& node) { + // Custom nodes can be inserted as Pre-processing steps + return std::make_shared(node); + }); + //! [ov:preprocess:custom] + //! [ov:preprocess:postprocess] + // Model's output has 'NCHW' layout + ppp.output("result_image").model().set_layout("NCHW"); + + // Set target user's tensor to U8 type + 'NHWC' layout + // Precision & layout conversions will be done implicitly + ppp.output("result_image").tensor() + .set_layout("NHWC") + .set_element_type(ov::element::u8); + + // Also it is possible to insert some custom operations + ppp.output("result_image").postprocess() + .custom([](const ov::Output& node) { + // Custom nodes can be inserted as Post-processing steps + return std::make_shared(node); + }); + //! [ov:preprocess:postprocess] + //! [ov:preprocess:build] + std::cout << "Dump preprocessor: " << ppp << std::endl; + model = ppp.build(); + //! [ov:preprocess:build] + + OPENVINO_ASSERT(model, "Model is invalid"); + return 0; +} + + //! [ov:preprocess:save_headers] + #include + #include + #include + //! [ov:preprocess:save_headers] + +void save_example() { + //! 
[ov:preprocess:save] + // ======== Step 0: read original model ========= + ov::Core core; + std::shared_ptr model = core.read_model("/path/to/some_model.onnx"); + + // ======== Step 1: Preprocessing ================ + ov::preprocess::PrePostProcessor prep(model); + // Declare section of desired application's input format + prep.input().tensor() + .set_element_type(ov::element::u8) + .set_layout("NHWC") + .set_color_format(ov::preprocess::ColorFormat::BGR) + .set_spatial_dynamic_shape(); + // Specify actual model layout + prep.input().model() + .set_layout("NCHW"); + // Explicit preprocessing steps. Layout conversion will be done automatically as last step + prep.input().preprocess() + .convert_element_type() + .convert_color(ov::preprocess::ColorFormat::RGB) + .resize(ov::preprocess::ResizeAlgorithm::RESIZE_LINEAR) + .mean({123.675, 116.28, 103.53}) // Subtract mean after color conversion + .scale({58.624, 57.12, 57.375}); + // Dump preprocessor + std::cout << "Preprocessor: " << prep << std::endl; + model = prep.build(); + + // ======== Step 2: Change batch size ================ + // In this example we also want to change batch size to increase throughput + ov::set_batch(model, 2); + + // ======== Step 3: Save the model ================ + std::string xml = "/path/to/some_model_saved.xml"; + std::string bin = "/path/to/some_model_saved.bin"; + ov::pass::Serialize(xml, bin).run_on_model(model); + //! [ov:preprocess:save] + +} + +void load_aftersave_example() { + //! [ov:preprocess:save_load] + ov::Core core; + core.set_property(ov::cache_dir("/path/to/cache/dir")); + + // In case that no preprocessing is needed anymore, we can load model on target device directly + // With cached model available, it will also save some time on reading original model + ov::CompiledModel compiled_model = core.compile_model("/path/to/some_model_saved.xml", "CPU"); + //! 
[ov:preprocess:save_load] +} diff --git a/docs/snippets/ov_preprocessing.py b/docs/snippets/ov_preprocessing.py new file mode 100644 index 00000000000..3cf594c882d --- /dev/null +++ b/docs/snippets/ov_preprocessing.py @@ -0,0 +1,227 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +from openvino.preprocess import ResizeAlgorithm, ColorFormat +from openvino.runtime import Layout, Type + + +xml_path = '' +input_name = '' +# ! [ov:preprocess:create] +from openvino.preprocess import PrePostProcessor +from openvino.runtime import Core + +core = Core() +model = core.read_model(model=xml_path) +ppp = PrePostProcessor(model) +# ! [ov:preprocess:create] + +# ! [ov:preprocess:tensor] +from openvino.preprocess import ColorFormat +from openvino.runtime import Layout, Type +ppp.input(input_name).tensor() \ + .set_element_type(Type.u8) \ + .set_shape([1, 480, 640, 3]) \ + .set_layout(Layout('NHWC')) \ + .set_color_format(ColorFormat.BGR) +# ! [ov:preprocess:tensor] +# ! [ov:preprocess:model] +# `model's input` already `knows` it's shape and data type, no need to specify them here +ppp.input(input_name).model().set_layout(Layout('NCHW')) +# ! [ov:preprocess:model] +# ! [ov:preprocess:steps] +from openvino.preprocess import ResizeAlgorithm +ppp.input(input_name).preprocess() \ + .convert_element_type(Type.f32) \ + .convert_color(ColorFormat.RGB) \ + .resize(ResizeAlgorithm.RESIZE_LINEAR) \ + .mean([100.5, 101, 101.5]) \ + .scale([50., 51., 52.]) +# .convert_layout(Layout('NCHW')); # Not needed, such conversion will be added implicitly +# ! [ov:preprocess:steps] +# ! [ov:preprocess:build] +print(f'Dump preprocessor: {ppp}') +model = ppp.build() +# ! [ov:preprocess:build] + +# ! [ov:preprocess:input_index] +ppp.input(1) # Gets 2nd input in a model +ppp.output(2) # Gets output with index=2 (3rd one) in a model +# ! [ov:preprocess:input_index] + + +# ! [ov:preprocess:input_name] +ppp.input('image') +ppp.output('result') +# ! 
[ov:preprocess:input_name] + +# ! [ov:preprocess:input_1] +# no index/name is needed if model has one input +ppp.input().preprocess().scale(50.) + +# same for output +ppp.output() \ + .postprocess().convert_element_type(Type.u8) +# ! [ov:preprocess:input_1] +# ! [ov:preprocess:mean_scale] +ppp.input('input').preprocess().mean(128).scale(127) +# ! [ov:preprocess:mean_scale] +# ! [ov:preprocess:mean_scale_array] +# Suppose model's shape is {1, 3, 224, 224} +# N=1, C=3, H=224, W=224 +ppp.input('input').model().set_layout(Layout('NCHW')) +# Mean/Scale has 3 values which matches with C=3 +ppp.input('input').preprocess() \ + .mean([103.94, 116.78, 123.68]).scale([57.21, 57.45, 57.73]) +# ! [ov:preprocess:mean_scale_array] +# ! [ov:preprocess:convert_element_type] +# First define data type for your tensor +ppp.input('input').tensor().set_element_type(Type.u8) + +# Then define preprocessing step +ppp.input('input').preprocess().convert_element_type(Type.f32) + +# If conversion is needed to `model's` element type, 'f32' can be omitted +ppp.input('input').preprocess().convert_element_type() +# ! [ov:preprocess:convert_element_type] +# ! [ov:preprocess:convert_layout] +# First define layout for your tensor +ppp.input('input').tensor().set_layout(Layout('NHWC')) + +# Then define layout of model +ppp.input('input').model().set_layout(Layout('NCHW')) + +print(ppp) # Will print 'implicit layout conversion step' +# ! [ov:preprocess:convert_layout] +# ! [ov:preprocess:convert_layout_2] +ppp.input('input').tensor().set_shape([1, 480, 640, 3]) + +# Model expects shape {1, 3, 480, 640} +ppp.input('input').preprocess()\ + .convert_layout([0, 3, 1, 2]) +# 0 -> 0; 3 -> 1; 1 -> 2; 2 -> 3 +# ! [ov:preprocess:convert_layout_2] + +# ! [ov:preprocess:resize_1] +ppp.input('input').tensor().set_shape([1, 3, 960, 1280]) +ppp.input('input').model().set_layout(Layout('??HW')) +ppp.input('input').preprocess()\ + .resize(ResizeAlgorithm.RESIZE_LINEAR, 480, 640) +# ! [ov:preprocess:resize_1] +# ! 
[ov:preprocess:resize_2] +ppp.input('input').tensor().set_shape([1, 3, 960, 1280]) +# Model accepts {1, 3, 480, 640} shape, thus last dimensions are 'H' and 'W' +ppp.input('input').model().set_layout(Layout('??HW')) +# Resize to model's dimension +ppp.input('input').preprocess().resize(ResizeAlgorithm.RESIZE_LINEAR) +# ! [ov:preprocess:resize_2] +# ! [ov:preprocess:convert_color_1] +ppp.input('input').tensor().set_color_format(ColorFormat.BGR) + +ppp.input('input').preprocess().convert_color(ColorFormat.RGB) +# ! [ov:preprocess:convert_color_1] +# ! [ov:preprocess:convert_color_2] +# This will split original `input` to 2 separate inputs: `input/y' and 'input/uv' +ppp.input('input').tensor()\ + .set_color_format(ColorFormat.NV12_TWO_PLANES) + +ppp.input('input').preprocess()\ + .convert_color(ColorFormat.RGB) +print(ppp) # Dump preprocessing steps to see what will happen +# ! [ov:preprocess:convert_color_2] + +# ! [ov:preprocess:custom] +# It is possible to insert some custom operations +import openvino.runtime.opset8 as ops +from openvino.runtime import Output +from openvino.runtime.utils.decorators import custom_preprocess_function + +@custom_preprocess_function +def custom_abs(output: Output): + # Custom nodes can be inserted as Preprocessing steps + return ops.abs(output) + +ppp.input("input_image").preprocess() \ + .custom(custom_abs) +# ! [ov:preprocess:custom] + +# ! 
[ov:preprocess:postprocess] +# Model's output has 'NCHW' layout +ppp.output('result_image').model().set_layout(Layout('NCHW')) + +# Set target user's tensor to U8 type + 'NHWC' layout +# Precision & layout conversions will be done implicitly +ppp.output('result_image').tensor()\ + .set_layout(Layout("NHWC"))\ + .set_element_type(Type.u8) + +# Also it is possible to insert some custom operations +import openvino.runtime.opset8 as ops +from openvino.runtime import Output +from openvino.runtime.utils.decorators import custom_preprocess_function + +@custom_preprocess_function +def custom_abs(output: Output): + # Custom nodes can be inserted as Post-processing steps + return ops.abs(output) + +ppp.output("result_image").postprocess()\ + .custom(custom_abs) +# ! [ov:preprocess:postprocess] + +# ! [ov:preprocess:save_headers] +from openvino.preprocess import PrePostProcessor, ColorFormat, ResizeAlgorithm +from openvino.runtime import Core, Layout, Type, set_batch +from openvino.runtime.passes import Manager +# ! [ov:preprocess:save_headers] + +# ! [ov:preprocess:save] +# ======== Step 0: read original model ========= +core = Core() +model = core.read_model(model='/path/to/some_model.onnx') + +# ======== Step 1: Preprocessing ================ +ppp = PrePostProcessor(model) +# Declare section of desired application's input format +ppp.input().tensor() \ + .set_element_type(Type.u8) \ + .set_spatial_dynamic_shape() \ + .set_layout(Layout('NHWC')) \ + .set_color_format(ColorFormat.BGR) + +# Specify actual model layout +ppp.input().model().set_layout(Layout('NCHW')) + +# Explicit preprocessing steps. 
Layout conversion will be done automatically as last step +ppp.input().preprocess() \ + .convert_element_type() \ + .convert_color(ColorFormat.RGB) \ + .resize(ResizeAlgorithm.RESIZE_LINEAR) \ + .mean([123.675, 116.28, 103.53]) \ + .scale([58.624, 57.12, 57.375]) + +# Dump preprocessor +print(f'Dump preprocessor: {ppp}') +model = ppp.build() + +# ======== Step 2: Change batch size ================ +# In this example we also want to change batch size to increase throughput +set_batch(model, 2) + +# ======== Step 3: Save the model ================ +pass_manager = Manager() +pass_manager.register_pass(pass_name="Serialize", + xml_path='/path/to/some_model_saved.xml', + bin_path='/path/to/some_model_saved.bin') +pass_manager.run_passes(model) +# ! [ov:preprocess:save] + +# ! [ov:preprocess:save_load] +core = Core() +core.set_property({'CACHE_DIR': '/path/to/cache/dir'}) + +# In case that no preprocessing is needed anymore, we can load model on target device directly +# With cached model available, it will also save some time on reading original model +compiled_model = core.compile_model('/path/to/some_model_saved.xml', 'CPU') +# ! [ov:preprocess:save_load] diff --git a/docs/snippets/ov_preprocessing_migration.cpp b/docs/snippets/ov_preprocessing_migration.cpp new file mode 100644 index 00000000000..032afb9c8b8 --- /dev/null +++ b/docs/snippets/ov_preprocessing_migration.cpp @@ -0,0 +1,124 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#include +#include +#include + +#include "inference_engine.hpp" + +int main_new() { + std::string model_path; + std::string tensor_name; + + ov::Core core; + std::shared_ptr model = core.read_model(model_path); + ov::preprocess::PrePostProcessor ppp(model); + + { + //! 
[ov_mean_scale] +ov::preprocess::PrePostProcessor ppp(model); +ov::preprocess::InputInfo& input = ppp.input(tensor_name); +// we only need to know where is C dimension +input.model().set_layout("...C"); +// specify scale and mean values, order of operations is important +input.preprocess().mean(116.78f).scale({ 57.21f, 57.45f, 57.73f }); +// insert preprocessing operations to the 'model' +model = ppp.build(); + //! [ov_mean_scale] + } + + { + //! [ov_conversions] +ov::preprocess::PrePostProcessor ppp(model); +ov::preprocess::InputInfo& input = ppp.input(tensor_name); +input.tensor().set_layout("NHWC").set_element_type(ov::element::u8); +input.model().set_layout("NCHW"); +// layout and precision conversion is inserted automatically, +// because tensor format != model input format +model = ppp.build(); + //! [ov_conversions] + } + + { + //! [ov_color_space] +ov::preprocess::PrePostProcessor ppp(model); +ov::preprocess::InputInfo& input = ppp.input(tensor_name); +input.tensor().set_color_format(ov::preprocess::ColorFormat::NV12_TWO_PLANES); +// add NV12 to BGR conversion +input.preprocess().convert_color(ov::preprocess::ColorFormat::BGR); +// and insert operations to the model +model = ppp.build(); + //! [ov_color_space] + } + + { + //! [ov_image_scale] +ov::preprocess::PrePostProcessor ppp(model); +ov::preprocess::InputInfo& input = ppp.input(tensor_name); +// scale from the specified tensor size +input.tensor().set_spatial_static_shape(448, 448); +// need to specify H and W dimensions in model, others are not important +input.model().set_layout("??HW"); +// scale to model shape +input.preprocess().resize(ov::preprocess::ResizeAlgorithm::RESIZE_LINEAR); +// and insert operations to the model +model = ppp.build(); + //! [ov_image_scale] + } + +return 0; +} + +int main_old() { + std::string model_path; + std::string operation_name; + + InferenceEngine::Core core; + InferenceEngine::CNNNetwork network = core.ReadNetwork(model_path); + + { + //! 
[mean_scale] +auto preProcess = network.getInputsInfo()[operation_name]->getPreProcess(); +preProcess.init(3); +preProcess[0]->meanValue = 116.78f; +preProcess[1]->meanValue = 116.78f; +preProcess[2]->meanValue = 116.78f; +preProcess[0]->stdScale = 57.21f; +preProcess[1]->stdScale = 57.45f; +preProcess[2]->stdScale = 57.73f; +preProcess.setVariant(InferenceEngine::MEAN_VALUE); + //! [mean_scale] + } + + { + //! [conversions] +auto inputInfo = network.getInputsInfo()[operation_name]; +inputInfo->setPrecision(InferenceEngine::Precision::U8); +inputInfo->setLayout(InferenceEngine::Layout::NHWC); +// model input layout is always NCHW in Inference Engine +// for shapes with 4 dimensions + //! [conversions] + } + + { + //! [color_space] +auto preProcess = network.getInputsInfo()[operation_name]->getPreProcess(); +// Inference Engine supposes NV12 as two inputs which need to be passed +// as InferenceEngine::NV12Blob composed of two Y and UV planes +preProcess.setColorFormat(InferenceEngine::NV12); + //! [color_space] + } + + { + //! [image_scale] +auto preProcess = network.getInputsInfo()[operation_name]->getPreProcess(); +// Inference Engine supposes input for resize is always in NCHW layout +// while for OpenVINO Runtime API 2.0 `H` and `W` dimensions must be specified +// Also, current code snippet supposed resize from dynamic shapes +preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR); + //! [image_scale] + } + + return 0; +} diff --git a/docs/snippets/ov_preprocessing_migration.py b/docs/snippets/ov_preprocessing_migration.py new file mode 100644 index 00000000000..123d9f9bc5a --- /dev/null +++ b/docs/snippets/ov_preprocessing_migration.py @@ -0,0 +1,107 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +#! [ov_imports] +from openvino.runtime import Core, Layout, Type +from openvino.preprocess import ColorFormat, PrePostProcessor, ResizeAlgorithm +#! [ov_imports] + +#! 
[imports] +import openvino.inference_engine as ie +#! [imports] + +#include "inference_engine.hpp" + +model_path = '' +tensor_name = '' + +core = Core() +model = core.read_model(model=model_path) + +#! [ov_mean_scale] +ppp = PrePostProcessor(model) +input = ppp.input(tensor_name) +# we only need to know where is C dimension +input.model().set_layout(Layout('...C')) +# specify scale and mean values, order of operations is important +input.preprocess().mean([116.78]).scale([57.21, 57.45, 57.73]) +# insert preprocessing operations to the 'model' +model = ppp.build() +#! [ov_mean_scale] + +#! [ov_conversions] +ppp = PrePostProcessor(model) +input = ppp.input(tensor_name) +input.tensor().set_layout(Layout('NCHW')).set_element_type(Type.u8) +input.model().set_layout(Layout('NCHW')) +# layout and precision conversion is inserted automatically, +# because tensor format != model input format +model = ppp.build() +#! [ov_conversions] + +#! [ov_color_space] +ppp = PrePostProcessor(model) +input = ppp.input(tensor_name) +input.tensor().set_color_format(ColorFormat.NV12_TWO_PLANES) +# add NV12 to BGR conversion +input.preprocess().convert_color(ColorFormat.BGR) +# and insert operations to the model +model = ppp.build() +#! [ov_color_space] + +#! [ov_image_scale] +ppp = PrePostProcessor(model) +input = ppp.input(tensor_name) +# need to specify H and W dimensions in model, others are not important +input.model().set_layout(Layout('??HW')) +# scale to model shape +input.preprocess().resize(ResizeAlgorithm.RESIZE_LINEAR, 448, 448) +# and insert operations to the model +model = ppp.build() +#! [ov_image_scale] + + + +model_path = '' +operation_name = '' + +core = Core() +network = core.ReadNetwork(model_path) + + +#! 
[mean_scale] +preProcess = network.getInputsInfo()[operation_name].getPreProcess() +preProcess.init(3) +preProcess[0].meanValue = 116.78 +preProcess[1].meanValue = 116.78 +preProcess[2].meanValue = 116.78 +preProcess[0].stdScale = 57.21 +preProcess[1].stdScale = 57.45 +preProcess[2].stdScale = 57.73 +preProcess.setVariant(ie.MEAN_VALUE) +#! [mean_scale] + +#! [conversions] +inputInfo = network.getInputsInfo()[operation_name] +inputInfo.setPrecision(ie.Precision.U8) +inputInfo.setLayout(ie.Layout.NHWC) +# model input layout is always NCHW in Inference Engine +# for shapes with 4 dimensions +#! [conversions] + +#! [color_space] + +preProcess = network.getInputsInfo()[operation_name].getPreProcess() +# Inference Engine supposes NV12 as two inputs which need to be passed +# as InferenceEngine::NV12Blob composed of two Y and UV planes +preProcess.setColorFormat(ie.NV12) +#! [color_space] + +#! [image_scale] +preProcess = network.getInputsInfo()[operation_name].getPreProcess() +# Inference Engine supposes input for resize is always in NCHW layout +# while for OpenVINO Runtime API 2.0 `H` and `W` dimensions must be specified +# Also, current code snippet supposed resize from dynamic shapes +preProcess.setResizeAlgorithm(ie.ResizeAlgorithm.RESIZE_BILINEAR) +#! [image_scale] diff --git a/docs/snippets/ov_properties_api.cpp b/docs/snippets/ov_properties_api.cpp new file mode 100644 index 00000000000..1d971f52ced --- /dev/null +++ b/docs/snippets/ov_properties_api.cpp @@ -0,0 +1,67 @@ +#include + +int main() { +//! [get_available_devices] +ov::Core core; +std::vector available_devices = core.get_available_devices(); +//! [get_available_devices] + +//! [hetero_priorities] +auto device_priorites = core.get_property("HETERO", ov::device::priorities); +//! [hetero_priorities] + +//! [cpu_device_name] +auto cpu_device_name = core.get_property("CPU", ov::device::full_name); +//! [cpu_device_name] + +auto model = core.read_model("sample.xml"); +{ +//! 
[compile_model_with_property] +auto compiled_model = core.compile_model(model, "CPU", + ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), + ov::hint::inference_precision(ov::element::f32)); +//! [compile_model_with_property] +} + +{ +//! [optimal_number_of_infer_requests] +auto compiled_model = core.compile_model(model, "CPU"); +auto nireq = compiled_model.get_property(ov::optimal_number_of_infer_requests); +//! [optimal_number_of_infer_requests] +} +{ +//! [core_set_property_then_compile] +// latency hint is the default for CPU +core.set_property("CPU", ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)); +// compiled with latency configuration hint +auto compiled_model_latency = core.compile_model(model, "CPU"); +// compiled with overridden ov::hint::performance_mode value +auto compiled_model_thrp = core.compile_model(model, "CPU", + ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT)); +//! [core_set_property_then_compile] +} + +{ +//! [device_thermal] +auto compiled_model = core.compile_model(model, "MYRIAD"); +float temperature = compiled_model.get_property(ov::device::thermal); +//! [device_thermal] +} + +{ +//! [inference_num_threads] +auto compiled_model = core.compile_model(model, "CPU"); +auto nthreads = compiled_model.get_property(ov::inference_num_threads); +//! [inference_num_threads] +} + +{ +//! [multi_device] +auto compiled_model = core.compile_model(model, "MULTI", + ov::device::priorities("CPU", "GPU")); +// change the order of priorities +compiled_model.set_property(ov::device::priorities("GPU", "CPU")); +//! 
[multi_device] +} +return 0; +} diff --git a/docs/snippets/ov_properties_api.py b/docs/snippets/ov_properties_api.py new file mode 100644 index 00000000000..232a52974a8 --- /dev/null +++ b/docs/snippets/ov_properties_api.py @@ -0,0 +1,55 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +from openvino.runtime import Core + +# [get_available_devices] +core = Core() +available_devices = core.available_devices +# [get_available_devices] + +# [cpu_device_name] +cpu_device_name = core.get_property("CPU", "FULL_DEVICE_NAME") +# [cpu_device_name] + +model = core.read_model(model="sample.xml") +# [compile_model_with_property] +config = {"PERFORMANCE_HINT": "THROUGHPUT", + "INFERENCE_PRECISION_HINT": "f32"} +compiled_model = core.compile_model(model, "CPU", config) +# [compile_model_with_property] + +# [optimal_number_of_infer_requests] +compiled_model = core.compile_model(model, "CPU") +nireq = compiled_model.get_property("OPTIMAL_NUMBER_OF_INFER_REQUESTS"); +# [optimal_number_of_infer_requests] + + +# [core_set_property_then_compile] +# latency hint is the default for CPU +core.set_property("CPU", {"PERFORMANCE_HINT": "LATENCY"}) +# compiled with latency configuration hint +compiled_model_latency = core.compile_model(model, "CPU") +# compiled with overridden performance hint value +config = {"PERFORMANCE_HINT": "THROUGHPUT"} +compiled_model_thrp = core.compile_model(model, "CPU", config) +# [core_set_property_then_compile] + +# [device_thermal] +compiled_model = core.compile_model(model, "MYRIAD") +temperature = compiled_model.get_property("DEVICE_THERMAL") +# [device_thermal] + + +# [inference_num_threads] +compiled_model = core.compile_model(model, "CPU") +nthreads = compiled_model.get_property("INFERENCE_NUM_THREADS") +# [inference_num_threads] + +# [multi_device] +config = {"MULTI_DEVICE_PRIORITIES": "CPU,GPU"} +compiled_model = core.compile_model(model, "MULTI", config) +# change the order of priorities 
+compiled_model.set_property({"MULTI_DEVICE_PRIORITIES": "GPU,CPU"}) +# [multi_device] diff --git a/docs/snippets/ov_properties_migration.cpp b/docs/snippets/ov_properties_migration.cpp new file mode 100644 index 00000000000..a982da7b29d --- /dev/null +++ b/docs/snippets/ov_properties_migration.cpp @@ -0,0 +1,95 @@ +#include +#include + +int main_new() { + ov::Core core; + +//! [core_get_ro_property] +// 'auto' is automatically deduced as std::string +// since the type is stored in the property +auto full_device_name = core.get_property("CPU", ov::device::full_name); +//! [core_get_ro_property] + +//! [core_get_rw_property] +// 'auto' is automatically deduced as ov::streams::Num +// since the type is stored in the property +auto num_streams = core.get_property("CPU", ov::streams::num); +//! [core_get_rw_property] + +//! [core_set_property] +core.set_property("CPU", ov::enable_profiling(true)); +//! [core_set_property] + +auto model = core.read_model("sample.xml"); +//! [core_compile_model] +auto compiled_model = core.compile_model(model, "MULTI", + ov::device::priorities("GPU", "CPU"), + ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT), + ov::hint::inference_precision(ov::element::f32)); +//! [core_compile_model] + +//! [compiled_model_set_property] +// turn CPU off for multi-device execution +compiled_model.set_property(ov::device::priorities("GPU")); +//! [compiled_model_set_property] + +{ +//! [compiled_model_get_ro_property] +// 'auto' is deduced to 'uint32_t' +auto nireq = compiled_model.get_property(ov::optimal_number_of_infer_requests); +//! [compiled_model_get_ro_property] +} + +{ +//! [compiled_model_get_rw_property] +ov::hint::PerformanceMode perf_mode = compiled_model.get_property(ov::hint::performance_mode); +//! [compiled_model_get_rw_property] +} + + +return 0; +} + + +int main_old() { + InferenceEngine::Core core; +//! [core_get_metric] +auto full_device_name = core.GetConfig("CPU", METRIC_KEY(FULL_DEVICE_NAME)).as(); +//! 
[core_get_metric] + +//! [core_get_config] +// a user has to parse std::string after +auto num_streams = core.GetMetric("CPU", CONFIG_KEY(CPU_THROUGHPUT_STREAMS)).as(); +//! [core_get_config] + +//! [core_set_config] +core.SetConfig({ { CONFIG_KEY(PERF_COUNT), CONFIG_VALUE(YES) } }, "CPU"); +//! [core_set_config] + +auto model = core.ReadNetwork("sample.xml"); +//! [core_load_network] +auto exec_network = core.LoadNetwork(model, "MULTI", { + { MULTI_CONFIG_KEY(DEVICE_PRIORITIES), "CPU, GPU" }, + { CONFIG_KEY(PERFORMANCE_HINT), CONFIG_VALUE(THROUGHPUT) }, + { CONFIG_KEY(ENFORCE_BF16), CONFIG_VALUE(NO) } }); +//! [core_load_network] + +//! [executable_network_set_config] +// turn CPU off for multi-device execution +exec_network.SetConfig({ { MULTI_CONFIG_KEY(DEVICE_PRIORITIES), "GPU" } }); +//! [executable_network_set_config] + +{ +//! [executable_network_get_metric] +auto nireq = exec_network.GetMetric(EXEC_NETWORK_METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)).as(); +//! [executable_network_get_metric] +} + +{ +//! [executable_network_get_config] +std::string perf_model = exec_network.GetConfig(CONFIG_KEY(PERFORMANCE_HINT)).as(); +//! [executable_network_get_config] +} + +return 0; +} diff --git a/docs/snippets/ov_properties_migration.py b/docs/snippets/ov_properties_migration.py new file mode 100644 index 00000000000..a1030ae5c22 --- /dev/null +++ b/docs/snippets/ov_properties_migration.py @@ -0,0 +1,44 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +from openvino.runtime import Core + + +core = Core() + +# ! [core_set_property] +core.set_property(device_name="CPU", properties={"PERF_COUNT": "YES"}) +# ! [core_set_property] + +model = core.read_model("sample.xml") + +# ! [core_compile_model] +compiled_model = core.compile_model(model=model, device_name="MULTI", config= + { + "MULTI_DEVICE_PRIORITIES": "GPU,CPU", + "PERFORMANCE_HINT": "THROUGHPUT", + "INFERENCE_PRECISION_HINT": "f32" + }) +# ! [core_compile_model] + +# ! 
[compiled_model_set_property] +# turn CPU off for multi-device execution +compiled_model.set_property(properties={"MULTI_DEVICE_PRIORITIES": "GPU"}) +# ! [compiled_model_set_property] + +# ! [core_get_rw_property] +num_streams = core.get_property("CPU", "NUM_STREAMS") +# ! [core_get_rw_property] + +# ! [core_get_ro_property] +full_device_name = core.get_property("CPU", "FULL_DEVICE_NAME") +# ! [core_get_ro_property] + +# ! [compiled_model_get_rw_property] +perf_mode = compiled_model.get_property("PERFORMANCE_HINT") +# ! [compiled_model_get_rw_property] + +# ! [compiled_model_get_ro_property] +nireq = compiled_model.get_property("OPTIMAL_NUMBER_OF_INFER_REQUESTS") +# ! [compiled_model_get_ro_property] diff --git a/docs/snippets/ov_python_exclusives.py b/docs/snippets/ov_python_exclusives.py new file mode 100644 index 00000000000..ecdb58e7a9b --- /dev/null +++ b/docs/snippets/ov_python_exclusives.py @@ -0,0 +1,133 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import numpy as np + +#! [auto_compilation] +import openvino.runtime as ov + +compiled_model = ov.compile_model("model.xml") +#! [auto_compilation] + +#! [properties_example] +core = ov.Core() + +input_a = ov.opset8.parameter([8]) +res = ov.opset8.absolute(input_a) +model = ov.Model(res, [input_a]) +compiled = core.compile_model(model, "CPU") + +print(model.inputs) +print(model.outputs) + +print(compiled.inputs) +print(compiled.outputs) +#! [properties_example] + +#! [tensor_basics] +data_float64 = np.ones(shape=(2,8)) + +tensor = ov.Tensor(data_float64) +assert tensor.element_type == ov.Type.f64 + +data_int32 = np.ones(shape=(2,8), dtype=np.int32) + +tensor = ov.Tensor(data_int32) +assert tensor.element_type == ov.Type.i32 +#! [tensor_basics] + +#! 
[tensor_shared_mode] +data_to_share = np.ones(shape=(2,8)) + +shared_tensor = ov.Tensor(data_to_share, shared_memory=True) + +# Editing of the numpy array affects Tensor's data +data_to_share[0][2] = 6.0 +assert shared_tensor.data[0][2] == 6.0 + +# Editing of Tensor's data affects the numpy array +shared_tensor.data[0][2] = 0.6 +assert data_to_share[0][2] == 0.6 +#! [tensor_shared_mode] + +#! [tensor_slice_mode] +data_to_share = np.ones(shape=(2,8)) + +# Specify slice of memory and the shape +shared_tensor = ov.Tensor(data_to_share[1][:] , shape=ov.Shape([8])) + +# Editing of the numpy array affects Tensor's data +data_to_share[1][:] = 2 +assert np.array_equal(shared_tensor.data, data_to_share[1][:]) +#! [tensor_slice_mode] + +infer_request = compiled.create_infer_request() +data = np.random.randint(-5, 3 + 1, size=(8)) + +#! [passing_numpy_array] +# Passing inputs data in form of a dictionary +infer_request.infer(inputs={0: data}) +# Passing inputs data in form of a list +infer_request.infer(inputs=[data]) +#! [passing_numpy_array] + +#! [getting_results] +# Get output tensor +results = infer_request.get_output_tensor().data + +# Get tensor with CompiledModel's output node +results = infer_request.get_tensor(compiled.outputs[0]).data + +# Get all results with special helper property +results = list(infer_request.results.values()) +#! [getting_results] + +#! [sync_infer] +# Simple call to InferRequest +results = infer_request.infer(inputs={0: data}) +# Extra feature: calling CompiledModel directly +results = compiled_model(inputs={0: data}) +#! [sync_infer] + +#! 
[asyncinferqueue] +core = ov.Core() + +# Simple model that adds two inputs together +input_a = ov.opset8.parameter([8]) +input_b = ov.opset8.parameter([8]) +res = ov.opset8.add(input_a, input_b) +model = ov.Model(res, [input_a, input_b]) +compiled = core.compile_model(model, "CPU") + +# Number of InferRequests that AsyncInferQueue holds +jobs = 4 +infer_queue = ov.AsyncInferQueue(compiled, jobs) + +# Create data +data = [np.array([i] * 8, dtype=np.float32) for i in range(jobs)] + +# Run all jobs +for i in range(len(data)): + infer_queue.start_async({0: data[i], 1: data[i]}) +infer_queue.wait_all() +#! [asyncinferqueue] + +#! [asyncinferqueue_access] +results = infer_queue[3].get_output_tensor().data +#! [asyncinferqueue_access] + +#! [asyncinferqueue_set_callback] +data_done = [False for _ in range(jobs)] + +def f(request, userdata): + print(f"Done! Result: {request.get_output_tensor().data}") + data_done[userdata] = True + +infer_queue.set_callback(f) + +for i in range(len(data)): + infer_queue.start_async({0: data[i], 1: data[i]}, userdata=i) +infer_queue.wait_all() + +assert all(data_done) +#! [asyncinferqueue_set_callback] diff --git a/docs/snippets/protecting_model_guide.cpp b/docs/snippets/protecting_model_guide.cpp index bcf6856023f..0f148e76167 100644 --- a/docs/snippets/protecting_model_guide.cpp +++ b/docs/snippets/protecting_model_guide.cpp @@ -1,7 +1,8 @@ -#include #include #include +#include "openvino/runtime/core.hpp" + void decrypt_file(std::ifstream & stream, const std::string & pass, std::vector & result) { @@ -9,24 +10,22 @@ void decrypt_file(std::ifstream & stream, int main() { //! 
[part0] -std::vector model; -std::vector weights; +std::vector model_data, weights_data; std::string password; // taken from an user std::ifstream model_file("model.xml"), weights_file("model.bin"); // Read model files and decrypt them into temporary memory block -decrypt_file(model_file, password, model); -decrypt_file(weights_file, password, weights); +decrypt_file(model_file, password, model_data); +decrypt_file(weights_file, password, weights_data); //! [part0] //! [part1] -InferenceEngine::Core core; +ov::Core core; // Load model from temporary memory block -std::string strModel(model.begin(), model.end()); -InferenceEngine::CNNNetwork network = core.ReadNetwork(strModel, - InferenceEngine::make_shared_blob({InferenceEngine::Precision::U8, - {weights.size()}, InferenceEngine::C}, weights.data())); +std::string str_model(model_data.begin(), model_data.end()); +auto model = core.read_model(str_model, + ov::Tensor(ov::element::u8, {weights_data.size()}, weights_data.data())); //! [part1] return 0; diff --git a/docs/snippets/src/main.cpp b/docs/snippets/src/main.cpp new file mode 100644 index 00000000000..c600e063865 --- /dev/null +++ b/docs/snippets/src/main.cpp @@ -0,0 +1,69 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +//! [include] +#include +//! [include] + +int main() { +//! [part1] +ov::Core core; +//! [part1] + +ov::CompiledModel compiled_model; +{ +//! [part2_1] +ov::CompiledModel compiled_model = core.compile_model("model.xml", "AUTO"); +//! [part2_1] +} +{ +//! [part2_2] +ov::CompiledModel compiled_model = core.compile_model("model.onnx", "AUTO"); +//! [part2_2] +} +{ +//! [part2_3] +ov::CompiledModel compiled_model = core.compile_model("model.pdmodel", "AUTO"); +//! [part2_3] +} +{ +//! 
[part2_4] +auto create_model = []() { + std::shared_ptr model; + // To construct a model, please follow + // https://docs.openvino.ai/latest/openvino_docs_OV_Runtime_UG_Model_Representation.html + return model; +}; +std::shared_ptr model = create_model(); +compiled_model = core.compile_model(model, "AUTO"); +//! [part2_4] +} + +//! [part3] +ov::InferRequest infer_request = compiled_model.create_infer_request(); +//! [part3] + +void * memory_ptr = nullptr; +//! [part4] +// Get input port for model with one input +auto input_port = compiled_model.input(); +// Create tensor from external memory +ov::Tensor input_tensor(input_port.get_element_type(), input_port.get_shape(), memory_ptr); +// Set input tensor for model with one input +infer_request.set_input_tensor(input_tensor); +//! [part4] + +//! [part5] +infer_request.start_async(); +infer_request.wait(); +//! [part5] + +//! [part6] +// Get output tensor by tensor name +auto output = infer_request.get_tensor("tensor_name"); +const float *output_buffer = output.data(); +/* output_buffer[] - accessing output tensor data */ +//! [part6] +return 0; +} diff --git a/docs/snippets/src/main.py b/docs/snippets/src/main.py new file mode 100644 index 00000000000..a86986d56be --- /dev/null +++ b/docs/snippets/src/main.py @@ -0,0 +1,58 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import numpy as np +#! [import] +import openvino.runtime as ov +#! [import] + +#! [part1] +core = ov.Core() +#! [part1] + +#! [part2_1] +compiled_model = core.compile_model("model.xml", "AUTO") +#! [part2_1] +#! [part2_2] +compiled_model = core.compile_model("model.onnx", "AUTO") +#! [part2_2] +#! [part2_3] +compiled_model = core.compile_model("model.pdmodel", "AUTO") +#! [part2_3] +#! 
[part2_4] +def create_model(): + # This example shows how to create ov::Function + # + # To construct a model, please follow + # https://docs.openvino.ai/latest/openvino_docs_OV_Runtime_UG_Model_Representation.html + data = ov.opset8.parameter([3, 1, 2], ov.Type.f32) + res = ov.opset8.result(data) + return ov.Model([res], [data], "model") + +model = create_model() +compiled_model = core.compile_model(model, "AUTO") +#! [part2_4] + +#! [part3] +infer_request = compiled_model.create_infer_request() +#! [part3] + +memory = np.array([1, 2, 3, 4]) +#! [part4] +# Create tensor from external memory +input_tensor = ov.Tensor(array=memory, shared_memory=True) +# Set input tensor for model with one input +infer_request.set_input_tensor(input_tensor) +#! [part4] + +#! [part5] +infer_request.start_async() +infer_request.wait() +#! [part5] + +#! [part6] +# Get output tensor for model with one output +output = infer_request.get_output_tensor() +output_buffer = output.data +# output_buffer[] - accessing output tensor data +#! [part6] diff --git a/docs/suppress_warnings.txt b/docs/suppress_warnings.txt index 572497bf642..8e9d8db84b0 100644 --- a/docs/suppress_warnings.txt +++ b/docs/suppress_warnings.txt @@ -13,7 +13,7 @@ the name \'.+?\' supplied as the argument warning: while setting up extension conf.py: csv directory not found argument \'.+?\' of command no uniquely matching class member found for -example example was already documented\. ignoring documentation found here\. +example was already documented\. ignoring documentation found here\. 
documentation for unknown define detected potential recursive class relation between class no matching file member found for @@ -100,3 +100,4 @@ explicit markup ends without a blank line \* keyerror: \* modulenotfounderror unexpected unindent +failed to import object diff --git a/docs/template_extension/CMakeLists.txt b/docs/template_extension/CMakeLists.txt index 46e6c9b2c08..0ca2dded963 100644 --- a/docs/template_extension/CMakeLists.txt +++ b/docs/template_extension/CMakeLists.txt @@ -4,3 +4,7 @@ add_subdirectory(old) add_subdirectory(new) + +# Enable code style check +file(GLOB_RECURSE template_extension_src "${CMAKE_CURRENT_SOURCE_DIR}/new/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/new/*.hpp") +add_clang_format_target(openvino_template_extension_clang FOR_SOURCES ${template_extension_src}) diff --git a/docs/template_extension/new/CMakeLists.txt b/docs/template_extension/new/CMakeLists.txt index 32bdeda4ea7..10371e33072 100644 --- a/docs/template_extension/new/CMakeLists.txt +++ b/docs/template_extension/new/CMakeLists.txt @@ -7,16 +7,18 @@ set(CMAKE_CXX_STANDARD 11) set(TARGET_NAME "openvino_template_extension") -find_package(OpenVINO) +find_package(OpenVINO REQUIRED) set(SRC identity.cpp ov_extension.cpp) add_library(${TARGET_NAME} MODULE ${SRC}) target_compile_definitions(${TARGET_NAME} PRIVATE IMPLEMENT_OPENVINO_EXTENSION_API) -target_link_libraries(${TARGET_NAME} PRIVATE openvino::core) -# [cmake:extension] +target_link_libraries(${TARGET_NAME} PRIVATE openvino::runtime) -# Enable code style check -file(GLOB_RECURSE template_extension_src "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp") -add_clang_format_target(${TARGET_NAME}_clang FOR_SOURCES ${template_extension_src}) +# To map custom operation to framework +if(OpenVINO_Frontend_ONNX_FOUND) + target_link_libraries(${TARGET_NAME} PRIVATE openvino::frontend::onnx) + target_compile_definitions(${TARGET_NAME} PRIVATE OPENVINO_ONNX_FRONTEND_ENABLED) +endif() +# [cmake:extension] diff --git 
a/docs/template_extension/new/identity.hpp b/docs/template_extension/new/identity.hpp index f31ab239c34..b8c5160014d 100644 --- a/docs/template_extension/new/identity.hpp +++ b/docs/template_extension/new/identity.hpp @@ -4,7 +4,14 @@ #pragma once +//! [op:common_include] #include +//! [op:common_include] +//! [op:frontend_include] +#ifdef OPENVINO_ONNX_FRONTEND_ENABLED +# include +#endif +//! [op:frontend_include] //! [op:header] namespace TemplateExtension { @@ -13,6 +20,10 @@ class Identity : public ov::op::Op { public: OPENVINO_OP("Identity"); +#ifdef OPENVINO_ONNX_FRONTEND_ENABLED + OPENVINO_FRAMEWORK_MAP(onnx) +#endif + Identity() = default; Identity(const ov::Output& arg); void validate_and_infer_types() override; diff --git a/docs/template_extension/new/ov_extension.cpp b/docs/template_extension/new/ov_extension.cpp index 79d414a82e4..d2fa1e35361 100644 --- a/docs/template_extension/new/ov_extension.cpp +++ b/docs/template_extension/new/ov_extension.cpp @@ -7,5 +7,11 @@ #include "identity.hpp" +// clang-format off +//! [ov_extension:entry_point] OPENVINO_CREATE_EXTENSIONS( - std::vector({std::make_shared>()})); + std::vector({ + std::make_shared>() + })); +//! 
[ov_extension:entry_point] +// clang-format on diff --git a/docs/template_plugin/backend/evaluates_map.cpp b/docs/template_plugin/backend/evaluates_map.cpp index af0d8c37dcb..cb3f1fc9e0d 100644 --- a/docs/template_plugin/backend/evaluates_map.cpp +++ b/docs/template_plugin/backend/evaluates_map.cpp @@ -27,6 +27,7 @@ #include #include #include +#include #include #include #include @@ -40,6 +41,7 @@ #include #include #include +#include #include #include #include @@ -399,6 +401,42 @@ bool evaluate(const shared_ptr& op, return true; } +template +bool evaluate(const shared_ptr& op, const HostTensorVector& outputs, const HostTensorVector& inputs) { + const auto in0_data_ptr = inputs[0]->get_data_ptr(); + const auto in1_data_ptr = inputs[1]->get_data_ptr(); + const auto out_data_ptr = outputs[0]->get_data_ptr(); + const auto in0_shape = inputs[0]->get_shape(); + const auto in1_shape = inputs[1]->get_shape(); + const auto broadcast_spec = op->get_autob(); + runtime::reference::greater::value_type, + typename element_type_traits::value_type>(in0_data_ptr, + in1_data_ptr, + out_data_ptr, + in0_shape, + in1_shape, + broadcast_spec); + return true; +} + +template +bool evaluate(const shared_ptr& op, const HostTensorVector& outputs, const HostTensorVector& inputs) { + const auto in0_data_ptr = inputs[0]->get_data_ptr(); + const auto in1_data_ptr = inputs[1]->get_data_ptr(); + const auto out_data_ptr = outputs[0]->get_data_ptr(); + const auto in0_shape = inputs[0]->get_shape(); + const auto in1_shape = inputs[1]->get_shape(); + const auto broadcast_spec = op->get_autob(); + runtime::reference::equal::value_type, + typename element_type_traits::value_type>(in0_data_ptr, + in1_data_ptr, + out_data_ptr, + in0_shape, + in1_shape, + broadcast_spec); + return true; +} + namespace cum_sum_v0 { template inline void evaluate(const shared_ptr& op, @@ -428,6 +466,145 @@ bool evaluate(const shared_ptr& op, const HostTensorVector& outp return true; } +namespace if_op { +bool call(const 
HostTensorVector& func_outputs, + const HostTensorVector& func_inputs, + const std::shared_ptr& function) { + // map function params -> HostTensor + std::unordered_map> tensor_map; + size_t input_count = 0; + for (const auto& param : function->get_parameters()) { + for (size_t i = 0; i < param->get_output_size(); ++i) { + descriptor::Tensor* tensor = ¶m->output(i).get_tensor(); + tensor_map.insert({tensor, func_inputs[input_count++]}); + } + } + + std::unordered_map, size_t> results_map; + // map function outputs -> HostTensor + for (size_t output_count = 0; output_count < function->get_results().size(); ++output_count) { + auto output = function->get_results()[output_count]; + results_map[output] = output_count; + } + + // for each ordered op in the graph + for (const auto& op : function->get_ordered_ops()) { + if (op::is_parameter(op)) { + continue; + } + + // get op inputs from map + std::vector> op_inputs; + for (auto input : op->inputs()) { + descriptor::Tensor* tensor = &input.get_tensor(); + op_inputs.push_back(tensor_map.at(tensor)); + } + + // get op outputs from map or create + std::vector> op_outputs; + for (size_t i = 0; i < op->get_output_size(); ++i) { + descriptor::Tensor* tensor = &op->output(i).get_tensor(); + std::shared_ptr host_tensor; + auto it = tensor_map.find(tensor); + if (op::is_output(op)) { + host_tensor = func_outputs[results_map[op]]; + } else if (it == tensor_map.end()) { + host_tensor = std::make_shared(op->output(i)); + tensor_map.insert({tensor, host_tensor}); + } else { + host_tensor = it->second; + } + op_outputs.push_back(host_tensor); + } + op->validate_and_infer_types(); + OPENVINO_SUPPRESS_DEPRECATED_START + if (!op->evaluate(op_outputs, op_inputs)) { + auto evaluates_map = ngraph::runtime::interpreter::get_evaluators_map(); + auto it = evaluates_map.find(op->get_type_info()); + if (!it->second(op, op_outputs, op_inputs)) { + return false; + } + } + OPENVINO_SUPPRESS_DEPRECATED_END + } + return true; +} + +void function(const 
std::shared_ptr& function, + const HostTensorVector& inputs, + HostTensorVector& outputs) { + const auto& parameters = function->get_parameters(); + const auto& parametersNumber = parameters.size(); + const auto& inputsNumber = inputs.size(); + NGRAPH_CHECK(parametersNumber == inputsNumber, + "Got function (", + function->get_friendly_name(), + ") with ", + parametersNumber, + " parameters, but ", + inputsNumber, + " input blobs"); + + for (const auto& parameter : parameters) { + const auto& parameterIndex = function->get_parameter_index(parameter); + const auto& parameterShape = parameter->get_shape(); + const auto& parameterType = parameter->get_element_type(); + const auto& parameterSize = shape_size(parameterShape) * parameterType.size(); + + const auto& input = inputs[parameterIndex]; + const auto& inputSize = input->get_size_in_bytes(); + NGRAPH_CHECK(parameterSize == inputSize, + "Got parameter (", + parameter->get_friendly_name(), + ") of size ", + parameterSize, + " bytes, but corresponding input with index ", + parameterIndex, + " has ", + inputSize, + " bytes"); + } + + const auto& results = function->get_results(); + outputs.reserve(results.size()); + for (size_t i = 0; i < results.size(); ++i) { + outputs.push_back(std::make_shared()); + } + call(outputs, inputs, function); +} + +void if_reference(const std::vector>& bodies, + const std::vector& out_descs, + const std::vector& input_descs, + const HostTensorVector& out, + const HostTensorVector& args) { + NGRAPH_CHECK(args.size() > 0, "If operation must have input condition value"); + + auto condition_value = args[0]->get_data_ptr()[0]; + auto branch_index = (condition_value) ? 
op::v8::If::THEN_BODY_INDEX : op::v8::If::ELSE_BODY_INDEX; + HostTensorVector inputs_to_body; + HostTensorVector outs_from_body; + inputs_to_body.resize(input_descs[branch_index].size()); + auto inputs_size = args.size(); + auto output_size = out.size(); + for (const auto& input_desc : input_descs[branch_index]) { + NGRAPH_CHECK(inputs_size > input_desc->m_input_index, + "Incorrect associating! If has not input with id ", + input_desc->m_input_index); + inputs_to_body[input_desc->m_body_parameter_index] = args[input_desc->m_input_index]; + } + function(bodies[branch_index], inputs_to_body, outs_from_body); + for (const auto& out_descr : out_descs[branch_index]) { + NGRAPH_CHECK(output_size > out_descr->m_output_index, + "Incorrect associating! If has not output with id ", + out_descr->m_output_index); + auto res = outs_from_body[out_descr->m_body_value_index]; + out[out_descr->m_output_index]->set_shape(res->get_shape()); + out[out_descr->m_output_index]->write(res->get_data_ptr(), res->get_size_in_bytes()); + } +} +} // namespace if_op + template bool evaluate(const shared_ptr& op, const HostTensorVector& outputs, const HostTensorVector& inputs) { std::vector> bodies; @@ -442,7 +619,11 @@ bool evaluate(const shared_ptr& op, const HostTensorVector& outputs, for (size_t i = 0; i < op->get_output_descriptions_size(); i++) { out_descs.emplace_back(op->get_output_descriptions(i)); } - runtime::reference::if_reference(bodies, out_descs, in_descs, outputs, inputs); + try { + runtime::reference::if_reference(bodies, out_descs, in_descs, outputs, inputs); + } catch (...) { + if_op::if_reference(bodies, out_descs, in_descs, outputs, inputs); + } return true; } @@ -857,7 +1038,7 @@ bool evaluate(const shared_ptr& op, &valid_outputs, info.sort_result_descending); - auto selected_scores_type = (inputs.size() < 4) ? element::f32 : inputs[3]->get_element_type(); + auto selected_scores_type = (outputs.size() < 3) ? 
element::f32 : outputs[1]->get_element_type(); runtime::reference::nms5_postprocessing(outputs, info.output_type, @@ -3348,6 +3529,24 @@ bool evaluate(const shared_ptr& op, const HostTensorVector& outp return true; } +template +bool evaluate(const shared_ptr& op, const HostTensorVector& outputs, const HostTensorVector& inputs) { + outputs[0]->set_unary(inputs[0]); + void* input = inputs[0]->get_data_ptr(); + outputs[0]->write(input, outputs[0]->get_size_in_bytes()); + return true; +} + +template +bool evaluate(const shared_ptr& op, + const HostTensorVector& outputs, + const HostTensorVector& inputs) { + outputs[0]->set_unary(inputs[0]); + void* input = inputs[0]->get_data_ptr(); + outputs[0]->write(input, outputs[0]->get_size_in_bytes()); + return true; +} + template inline bool evaluate(const shared_ptr& op, const HostTensorVector& outputs, @@ -3430,7 +3629,7 @@ bool evaluate(const shared_ptr& op, template bool evaluate_node(std::shared_ptr node, const HostTensorVector& outputs, const HostTensorVector& inputs) { auto element_type = node->get_output_element_type(0); - if (ov::is_type(node)) + if (ov::is_type(node) || ov::is_type(node)) element_type = node->get_input_element_type(1); switch (element_type) { diff --git a/docs/template_plugin/backend/int_executable.cpp b/docs/template_plugin/backend/int_executable.cpp index b64d9812a40..e84bbe5d780 100644 --- a/docs/template_plugin/backend/int_executable.cpp +++ b/docs/template_plugin/backend/int_executable.cpp @@ -154,8 +154,7 @@ bool runtime::interpreter::INTExecutable::call(const vector(cloned_node->get_input_element_type(0), cloned_node->get_input_shape(0)); - std::vector data(ov::shape_size(cloned_node->get_input_shape(0)), 0); - h_tensor->write(data.data(), data.size() * sizeof(float)); + h_tensor->write(h_tensor->get_data_ptr(), h_tensor->get_size_in_bytes()); variable_context.set_variable_value(variable, std::make_shared(h_tensor)); } } diff --git a/docs/template_plugin/backend/opset_int_tbl.hpp 
b/docs/template_plugin/backend/opset_int_tbl.hpp index d8edd45d4b9..fabfb5ee2aa 100644 --- a/docs/template_plugin/backend/opset_int_tbl.hpp +++ b/docs/template_plugin/backend/opset_int_tbl.hpp @@ -45,6 +45,8 @@ NGRAPH_OP(ConvertLike, op::v1) NGRAPH_OP(Convolution, ngraph::op::v1) NGRAPH_OP(ConvolutionBackpropData, ngraph::op::v1) NGRAPH_OP(DeformablePSROIPooling, ngraph::op::v1) +NGRAPH_OP(Equal, ngraph::op::v1) +NGRAPH_OP(Greater, ngraph::op::v1) NGRAPH_OP(GroupConvolution, ngraph::op::v1) NGRAPH_OP(GroupConvolutionBackpropData, ngraph::op::v1) NGRAPH_OP(DeformableConvolution, ngraph::op::v1) @@ -63,6 +65,7 @@ NGRAPH_OP(Reshape, op::v1) NGRAPH_OP(Select, op::v1) NGRAPH_OP(GatherTree, op::v1) +NGRAPH_OP(Assign, op::v3) NGRAPH_OP(Bucketize, op::v3) NGRAPH_OP(EmbeddingBagOffsetsSum, ngraph::op::v3) NGRAPH_OP(EmbeddingBagPackedSum, ngraph::op::v3) @@ -71,6 +74,7 @@ NGRAPH_OP(EmbeddingSegmentsSum, ngraph::op::v3) NGRAPH_OP(GRUCell, ngraph::op::v3) NGRAPH_OP(NonMaxSuppression, op::v3) NGRAPH_OP(NonZero, op::v3) +NGRAPH_OP(ReadValue, op::v3) NGRAPH_OP(ScatterNDUpdate, op::v3) NGRAPH_OP(ShapeOf, op::v3) diff --git a/docs/template_plugin/src/template_plugin.cpp b/docs/template_plugin/src/template_plugin.cpp index acd4a809664..76c3dbc2182 100644 --- a/docs/template_plugin/src/template_plugin.cpp +++ b/docs/template_plugin/src/template_plugin.cpp @@ -82,8 +82,8 @@ std::shared_ptr TransformNetwork(const std::shared_ptr(); - passManager.register_pass(); + passManager.register_pass(); + passManager.register_pass(); // Register any other transformations // .. 
diff --git a/docs/template_plugin/src/transformations/preprocessing/mean_image_or_value.cpp b/docs/template_plugin/src/transformations/preprocessing/mean_image_or_value.cpp index 32257c85624..4a3938fbc53 100644 --- a/docs/template_plugin/src/transformations/preprocessing/mean_image_or_value.cpp +++ b/docs/template_plugin/src/transformations/preprocessing/mean_image_or_value.cpp @@ -10,8 +10,6 @@ using namespace ngraph; -NGRAPH_RTTI_DEFINITION(ngraph::pass::AddMeanSubtract, "AddMeanSubtract", 0); - ngraph::pass::AddMeanSubtract::AddMeanSubtract(const MeanMap& inputInfoMap) { // RUN_ON_FUNCTION_SCOPE(AddMeanSubtract); auto label = ngraph::pattern::wrap_type(); diff --git a/docs/template_plugin/src/transformations/preprocessing/mean_image_or_value.hpp b/docs/template_plugin/src/transformations/preprocessing/mean_image_or_value.hpp index 0eb289f8dc5..833d05a7903 100644 --- a/docs/template_plugin/src/transformations/preprocessing/mean_image_or_value.hpp +++ b/docs/template_plugin/src/transformations/preprocessing/mean_image_or_value.hpp @@ -27,6 +27,6 @@ class ngraph::pass::AddMeanSubtract : public ngraph::pass::MatcherPass { public: using MeanMap = std::map>; - NGRAPH_RTTI_DECLARATION; + OPENVINO_RTTI("AddMeanSubtract", "0"); explicit AddMeanSubtract(const MeanMap& inputInfoMap); }; diff --git a/docs/template_plugin/src/transformations/preprocessing/preprocessing.cpp b/docs/template_plugin/src/transformations/preprocessing/preprocessing.cpp index 2c644cbc808..fca5d56477f 100644 --- a/docs/template_plugin/src/transformations/preprocessing/preprocessing.cpp +++ b/docs/template_plugin/src/transformations/preprocessing/preprocessing.cpp @@ -10,8 +10,6 @@ #include "transformations/preprocessing/mean_image_or_value.hpp" #include "transformations/preprocessing/std_scale.hpp" -NGRAPH_RTTI_DEFINITION(ngraph::pass::AddPreprocessing, "AddPreprocessing", 0); - ngraph::pass::AddPreprocessing::AddPreprocessing(const InferenceEngine::InputsDataMap& inputInfoMap) : 
m_inputInfoMap(inputInfoMap) {} diff --git a/docs/template_plugin/src/transformations/preprocessing/preprocessing.hpp b/docs/template_plugin/src/transformations/preprocessing/preprocessing.hpp index f78a1bdbfbb..2488bade42d 100644 --- a/docs/template_plugin/src/transformations/preprocessing/preprocessing.hpp +++ b/docs/template_plugin/src/transformations/preprocessing/preprocessing.hpp @@ -29,7 +29,7 @@ class ngraph::pass::AddPreprocessing : public ngraph::pass::FunctionPass { const InferenceEngine::InputsDataMap& m_inputInfoMap; public: - NGRAPH_RTTI_DECLARATION; + OPENVINO_RTTI("AddPreprocessing", "0"); explicit AddPreprocessing(const InferenceEngine::InputsDataMap& inputInfoMap); bool run_on_model(const std::shared_ptr& m) override; diff --git a/docs/template_plugin/src/transformations/preprocessing/std_scale.cpp b/docs/template_plugin/src/transformations/preprocessing/std_scale.cpp index d51e7693b3e..efc21983a09 100644 --- a/docs/template_plugin/src/transformations/preprocessing/std_scale.cpp +++ b/docs/template_plugin/src/transformations/preprocessing/std_scale.cpp @@ -10,8 +10,6 @@ using namespace ngraph; -NGRAPH_RTTI_DEFINITION(ngraph::pass::AddStdScale, "AddStdScale", 0); - ngraph::pass::AddStdScale::AddStdScale(const ScaleMap& inputInfoMap) { // RUN_ON_FUNCTION_SCOPE(AddStdScale); auto label = ngraph::pattern::wrap_type(); diff --git a/docs/template_plugin/src/transformations/preprocessing/std_scale.hpp b/docs/template_plugin/src/transformations/preprocessing/std_scale.hpp index f2a80086795..f877724613a 100644 --- a/docs/template_plugin/src/transformations/preprocessing/std_scale.hpp +++ b/docs/template_plugin/src/transformations/preprocessing/std_scale.hpp @@ -27,6 +27,6 @@ class ngraph::pass::AddStdScale : public ngraph::pass::MatcherPass { public: using ScaleMap = std::map>; - NGRAPH_RTTI_DECLARATION; + OPENVINO_RTTI("AddStdScale", "0"); explicit AddStdScale(const ScaleMap& inputInfoMap); }; diff --git 
a/docs/template_plugin/src/transformations/template_function_transformation.hpp b/docs/template_plugin/src/transformations/template_function_transformation.hpp deleted file mode 100644 index 68641470730..00000000000 --- a/docs/template_plugin/src/transformations/template_function_transformation.hpp +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (C) 2018-2022 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#pragma once - -#include - -namespace ngraph { -namespace pass { - -class MyFunctionTransformation; - -} // namespace pass -} // namespace ngraph - -// ! [function_pass:template_transformation_hpp] -// template_function_transformation.hpp -class ngraph::pass::MyFunctionTransformation : public ngraph::pass::FunctionPass { -public: - NGRAPH_RTTI_DECLARATION; - bool run_on_model(const std::shared_ptr& f) override; -}; -// ! [function_pass:template_transformation_hpp] diff --git a/docs/template_plugin/src/transformations/template_function_transformation.cpp b/docs/template_plugin/src/transformations/template_model_transformation.cpp similarity index 72% rename from docs/template_plugin/src/transformations/template_function_transformation.cpp rename to docs/template_plugin/src/transformations/template_model_transformation.cpp index 669b5c811e2..dd6e56b1c42 100644 --- a/docs/template_plugin/src/transformations/template_function_transformation.cpp +++ b/docs/template_plugin/src/transformations/template_model_transformation.cpp @@ -2,18 +2,15 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "template_function_transformation.hpp" +#include "template_model_transformation.hpp" -#include +#include "openvino/cc/pass/itt.hpp" -using namespace ngraph; - -// ! [function_pass:template_transformation_cpp] +// ! 
[model_pass:template_transformation_cpp] // template_function_transformation.cpp -NGRAPH_RTTI_DEFINITION(ngraph::pass::MyFunctionTransformation, "MyFunctionTransformation", 0); -bool pass::MyFunctionTransformation::run_on_model(const std::shared_ptr& f) { - RUN_ON_FUNCTION_SCOPE(MyFunctionTransformation); +bool ov::pass::MyModelTransformation::run_on_model(const std::shared_ptr& f) { + RUN_ON_MODEL_SCOPE(MyModelTransformation); // Example transformation code NodeVector nodes; @@ -40,4 +37,4 @@ bool pass::MyFunctionTransformation::run_on_model(const std::shared_ptr& f) override; +}; +// ! [model_pass:template_transformation_hpp] diff --git a/docs/template_plugin/src/transformations/template_pattern_transformation.cpp b/docs/template_plugin/src/transformations/template_pattern_transformation.cpp index 7169d5946fc..7c451be5317 100644 --- a/docs/template_plugin/src/transformations/template_pattern_transformation.cpp +++ b/docs/template_plugin/src/transformations/template_pattern_transformation.cpp @@ -4,154 +4,147 @@ #include "transformations/template_pattern_transformation.hpp" -#include -#include -#include -#include -#include - -#include "transformations/template_function_transformation.hpp" - -using namespace ngraph; +#include "openvino/cc/pass/itt.hpp" +#include "openvino/core/rt_info.hpp" +#include "openvino/opsets/opset3.hpp" +#include "openvino/pass/manager.hpp" +#include "openvino/pass/pattern/op/wrap_type.hpp" +#include "transformations/template_model_transformation.hpp" // ! 
[graph_rewrite:template_transformation_cpp] // template_pattern_transformation.cpp -NGRAPH_RTTI_DEFINITION(ngraph::pass::DecomposeDivideMatcher, "DecomposeDivideMatcher", 0); - -ngraph::pass::DecomposeDivideMatcher::DecomposeDivideMatcher() { +ov::pass::DecomposeDivideMatcher::DecomposeDivideMatcher() { MATCHER_SCOPE(DecomposeDivideMatcher); // Pattern example auto input0 = pattern::any_input(); auto input1 = pattern::any_input(); - auto div = std::make_shared(input0, input1); + auto div = std::make_shared(input0, input1); - ngraph::matcher_pass_callback callback = [](pattern::Matcher& m) { - auto div = std::dynamic_pointer_cast(m.get_match_root()); + ov::matcher_pass_callback callback = [](pattern::Matcher& m) { + auto div = std::dynamic_pointer_cast(m.get_match_root()); // We can not apply this transformation in case with integer input data type if (!div || div->input(0).get_element_type().is_integral()) { return false; } // Decompose Divide into Multiply with Power operations - auto pow = std::make_shared( + auto pow = std::make_shared( div->input_value(1), opset3::Constant::create(div->get_input_element_type(1), Shape{1}, {-1})); - auto mul = std::make_shared(div->input_value(0), pow); + auto mul = std::make_shared(div->input_value(0), pow); // Save original name to last operation in replacement sub-graph mul->set_friendly_name(div->get_friendly_name()); // Copy runtime info attributes to newly created operation - ngraph::copy_runtime_info(div, {pow, mul}); + ov::copy_runtime_info(div, {pow, mul}); // Replace Divide operation with Multiply - ngraph::replace_node(div, mul); + ov::replace_node(div, mul); // Return true as the root node was changed return true; }; // Register pattern with Divide operation as a pattern root node - auto m = std::make_shared(div, "ConvertDivide"); + auto m = std::make_shared(div, "ConvertDivide"); // Register Matcher register_matcher(m, callback); } // ! [graph_rewrite:template_transformation_cpp] // ! 
[matcher_pass:relu_fusion] -NGRAPH_RTTI_DEFINITION(ngraph::pass::ReluReluFusionMatcher, "ReluReluFusionMatcher", 0); - -ngraph::pass::ReluReluFusionMatcher::ReluReluFusionMatcher() { +ov::pass::ReluReluFusionMatcher::ReluReluFusionMatcher() { MATCHER_SCOPE(ReluReluFusionMatcher); - auto m_relu1 = ngraph::pattern::wrap_type(pattern::consumers_count(1)); - auto m_relu2 = ngraph::pattern::wrap_type({m_relu1}); + auto m_relu1 = ov::pass::pattern::wrap_type(pattern::consumers_count(1)); + auto m_relu2 = ov::pass::pattern::wrap_type({m_relu1}); - ngraph::matcher_pass_callback callback = [=](pattern::Matcher& m) { + ov::matcher_pass_callback callback = [=](pattern::Matcher& m) { // Map that helps to connect labels with matched outputs auto& node_to_output = m.get_pattern_value_map(); // Create new Relu operation and add register it for additional execution auto new_relu = - register_new_node(node_to_output.at(m_relu1).get_node_shared_ptr()->input_value(0)); + register_new_node(node_to_output.at(m_relu1).get_node_shared_ptr()->input_value(0)); // Copy runtime info attributes to newly created operation - ngraph::copy_runtime_info(m.get_matched_nodes(), new_relu); + ov::copy_runtime_info(m.get_matched_nodes(), new_relu); // Save last Relu name to new Relu operation new_relu->set_friendly_name(m.get_match_root()->get_friendly_name()); // Replace Relu->Relu with Relu - ngraph::replace_node(m.get_match_root(), new_relu); + ov::replace_node(m.get_match_root(), new_relu); // Return true as the root node was changed return true; }; // Register pattern with Relu operation as a pattern root node - auto m = std::make_shared(m_relu2, "ReluReluFusion"); + auto m = std::make_shared(m_relu2, "ReluReluFusion"); // Register Matcher register_matcher(m, callback); } // ! [matcher_pass:relu_fusion] -void run_matcher_on_node(std::shared_ptr node) { +void run_matcher_on_node(std::shared_ptr node) { // ! 
[matcher_pass:run_on_node] - if (ngraph::pass::DecomposeDivideMatcher().apply(node)) { + if (ov::pass::DecomposeDivideMatcher().apply(node)) { // successful execution (root node was replaced) } // ! [matcher_pass:run_on_node] } -void run_matcher_with_manager(std::shared_ptr f) { +void run_matcher_with_manager(std::shared_ptr f) { // ! [matcher_pass:manager] // Two matchers will run independently (two independent graph traversals) // pass::Manager automatically creates GraphRewrite container for each MatcherPass - pass::Manager manager; - manager.register_pass(); - manager.register_pass(); + ov::pass::Manager manager; + manager.register_pass(); + manager.register_pass(); manager.run_passes(f); // ! [matcher_pass:manager] } -void run_matcher_with_manager2(std::shared_ptr f) { +void run_matcher_with_manager2(std::shared_ptr f) { // ! [matcher_pass:manager2] // Register anchor GraphRewrite pass inside manager that will execute two matchers simultaneously - pass::Manager manager; - auto anchor = manager.register_pass(); - anchor->add_matcher(); - anchor->add_matcher(); + ov::pass::Manager manager; + auto anchor = manager.register_pass(); + anchor->add_matcher(); + anchor->add_matcher(); manager.run_passes(f); // ! [matcher_pass:manager2] } -void run_matcher_with_manager3(std::shared_ptr f) { +void run_matcher_with_manager3(std::shared_ptr f) { // ! [matcher_pass:manager3] - pass::Manager manager; - manager.register_pass(); + ov::pass::Manager manager; + manager.register_pass(); // Two matchers will run independently (two independent graph traversals) // pass::Manager automatically creates GraphRewrite container for each MatcherPass - manager.register_pass(); - manager.register_pass(); + manager.register_pass(); + manager.register_pass(); manager.run_passes(f); // ! [matcher_pass:manager3] } -void run_matcher_with_gr(std::shared_ptr f) { +void run_matcher_with_gr(std::shared_ptr f) { // ! 
[matcher_pass:graph_rewrite] // Two matcher passes will run simultaneously in a single graph traversal - ngraph::pass::GraphRewrite pass; - pass.add_matcher(); - pass.add_matcher(); + ov::pass::GraphRewrite pass; + pass.add_matcher(); + pass.add_matcher(); pass.run_on_model(f); // ! [matcher_pass:graph_rewrite] } // ! [manual_constant_folding] template -Output eltwise_fold(const Output& input0, const Output& input1) { +ov::Output eltwise_fold(const ov::Output& input0, const ov::Output& input1) { auto eltwise = std::make_shared(input0, input1); - OutputVector output(eltwise->get_output_size()); + ov::OutputVector output(eltwise->get_output_size()); // If constant folding wasn't successful return eltwise output if (!eltwise->constant_fold(output, {input0, input1})) { return eltwise->output(0); diff --git a/docs/template_plugin/src/transformations/template_pattern_transformation.hpp b/docs/template_plugin/src/transformations/template_pattern_transformation.hpp index 9329ed92ead..5a16133b451 100644 --- a/docs/template_plugin/src/transformations/template_pattern_transformation.hpp +++ b/docs/template_plugin/src/transformations/template_pattern_transformation.hpp @@ -4,16 +4,16 @@ #pragma once -#include +#include "openvino/pass/graph_rewrite.hpp" -namespace ngraph { +namespace ov { namespace pass { class DecomposeDivideMatcher; class ReluReluFusionMatcher; } // namespace pass -} // namespace ngraph +} // namespace ov // ! [graph_rewrite:template_transformation_hpp] // transformations/template_pattern_transformation.hpp @@ -21,15 +21,15 @@ class ReluReluFusionMatcher; * @ingroup ie_transformation_common_api * @brief Add transformation description. */ -class ngraph::pass::DecomposeDivideMatcher : public ngraph::pass::MatcherPass { +class ov::pass::DecomposeDivideMatcher : public ov::pass::MatcherPass { public: - NGRAPH_RTTI_DECLARATION; + OPENVINO_RTTI("DecomposeDivideMatcher", "0"); DecomposeDivideMatcher(); }; // ! 
[graph_rewrite:template_transformation_hpp] -class ngraph::pass::ReluReluFusionMatcher : public ngraph::pass::MatcherPass { +class ov::pass::ReluReluFusionMatcher : public ov::pass::MatcherPass { public: - NGRAPH_RTTI_DECLARATION; + OPENVINO_RTTI("ReluReluFusionMatcher", "0"); ReluReluFusionMatcher(); }; diff --git a/docs/template_plugin/tests/functional/op_reference/memory.cpp b/docs/template_plugin/tests/functional/op_reference/memory.cpp new file mode 100644 index 00000000000..7610d5d9e23 --- /dev/null +++ b/docs/template_plugin/tests/functional/op_reference/memory.cpp @@ -0,0 +1,199 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include "base_reference_test.hpp" +#include "openvino/op/constant.hpp" +#include "openvino/op/read_value.hpp" +#include "openvino/op/util/variable.hpp" + +using namespace ov; +using namespace reference_tests; + +namespace { + +struct ReadValueAssignParams { + template + ReadValueAssignParams(const Shape& input_shape, + const Shape& output_shape, + const element::Type& input_type, + const element::Type& ouput_type, + const std::vector& input_values, + const std::vector& output_values, + const std::string& variable_id) + : m_input_shape(input_shape), + m_output_shape(output_shape), + m_input_type(input_type), + m_output_type(ouput_type), + m_input_data(CreateTensor(input_type, input_values)), + m_expected_data(CreateTensor(ouput_type, output_values)), + m_variable_id(variable_id) {} + Shape m_input_shape; + Shape m_output_shape; + element::Type m_input_type; + element::Type m_output_type; + runtime::Tensor m_input_data; + runtime::Tensor m_expected_data; + std::string m_variable_id; +}; + +class ReferenceReadValueAssignV3LayerTest : public testing::TestWithParam, + public CommonReferenceTest { +public: + void SetUp() override { + auto params = GetParam(); + function = CreateFunction(params.m_input_shape, params.m_input_type, params.m_variable_id); + inputData = 
{params.m_input_data}; + refOutData = {params.m_expected_data}; + } + + static std::string getTestCaseName(const testing::TestParamInfo& obj) { + auto params = obj.param; + std::ostringstream result; + result << "shape=" << params.m_input_shape << "_"; + result << "iType=" << params.m_input_type << "_"; + result << "shape=" << params.m_output_shape << "_"; + result << "oType=" << params.m_output_type; + return result.str(); + } + +private: + static std::shared_ptr CreateFunction(const Shape& input_shape, + const element::Type& input_type, + const std::string variable_id) { + auto in = std::make_shared(input_type, input_shape); + auto read_value = std::make_shared(in, variable_id); + auto assign = std::make_shared(read_value, variable_id); + return std::make_shared(OutputVector{assign}, ParameterVector{in}); + } +}; + +class ReferenceReadValueAssignV6LayerTest : public testing::TestWithParam, + public CommonReferenceTest { +public: + void SetUp() override { + auto params = GetParam(); + function = CreateFunction(params.m_input_shape, params.m_input_type, params.m_variable_id); + inputData = {params.m_input_data}; + refOutData = {params.m_expected_data}; + } + + static std::string getTestCaseName(const testing::TestParamInfo& obj) { + auto params = obj.param; + std::ostringstream result; + result << "shape=" << params.m_input_shape << "_"; + result << "iType=" << params.m_input_type << "_"; + result << "shape=" << params.m_output_shape << "_"; + result << "oType=" << params.m_output_type; + return result.str(); + } + +private: + static std::shared_ptr CreateFunction(const Shape& input_shape, + const element::Type& input_type, + const std::string variable_id) { + auto in = std::make_shared(input_type, input_shape); + auto variable = std::make_shared( + op::util::VariableInfo{PartialShape::dynamic(), element::dynamic, variable_id}); + auto assign = std::make_shared(in, variable); + auto read_value = std::make_shared(assign, variable); + return 
std::make_shared(OutputVector{read_value}, + ParameterVector{in}, + op::util::VariableVector{variable}); + } +}; + +TEST_P(ReferenceReadValueAssignV3LayerTest, ReadValueAssignWithHardcodedRefs) { + Exec(); +} + +TEST_P(ReferenceReadValueAssignV6LayerTest, ReadValueAssignWithHardcodedRefs) { + Exec(); +} + +template +std::vector generateParamsForReadValueAssign() { + using T = typename element_type_traits::value_type; + + std::vector params{ + ReadValueAssignParams(ov::Shape{1}, ov::Shape{1}, IN_ET, IN_ET, std::vector{1}, std::vector{1}, "v0"), + ReadValueAssignParams(ov::Shape{2, 2}, + ov::Shape{2, 2}, + IN_ET, + IN_ET, + std::vector{1, 2, 3, 4}, + std::vector{1, 2, 3, 4}, + "v0"), + ReadValueAssignParams(ov::Shape{1, 2, 3}, + ov::Shape{1, 2, 3}, + IN_ET, + IN_ET, + std::vector{1, 2, 3, 4, 5, 6}, + std::vector{1, 2, 3, 4, 5, 6}, + "v0")}; + return params; +} + +template +std::vector generateParamsForReadValueAssignBoolean() { + using T = typename element_type_traits::value_type; + + std::vector params{ + ReadValueAssignParams(ov::Shape{1}, ov::Shape{1}, IN_ET, IN_ET, std::vector{true}, std::vector{true}, "v0"), + ReadValueAssignParams(ov::Shape{2, 2}, + ov::Shape{2, 2}, + IN_ET, + IN_ET, + std::vector{true, true, false, false}, + std::vector{true, true, false, false}, + "v0"), + ReadValueAssignParams(ov::Shape{1, 2, 3}, + ov::Shape{1, 2, 3}, + IN_ET, + IN_ET, + std::vector{true, false, true, false, true, false}, + std::vector{true, false, true, false, true, false}, + "v0")}; + return params; +} + +std::vector generateCombinedParamsForReadValueAssign() { + const std::vector> allTypeParams{ + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + 
generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssign(), + generateParamsForReadValueAssignBoolean()}; + + std::vector combinedParams; + + for (const auto& params : allTypeParams) { + combinedParams.insert(combinedParams.end(), params.begin(), params.end()); + } + + return combinedParams; +} + +INSTANTIATE_TEST_SUITE_P(smoke_ReadValue_Assign_With_Hardcoded_Refs, + ReferenceReadValueAssignV3LayerTest, + ::testing::ValuesIn(generateCombinedParamsForReadValueAssign()), + ReferenceReadValueAssignV3LayerTest::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(smoke_ReadValue_Assign_With_Hardcoded_Refs, + ReferenceReadValueAssignV6LayerTest, + ::testing::ValuesIn(generateCombinedParamsForReadValueAssign()), + ReferenceReadValueAssignV6LayerTest::getTestCaseName); + +} // namespace diff --git a/docs/template_plugin/tests/functional/op_reference/roi_align.cpp b/docs/template_plugin/tests/functional/op_reference/roi_align.cpp new file mode 100644 index 00000000000..4ae9aeff19a --- /dev/null +++ b/docs/template_plugin/tests/functional/op_reference/roi_align.cpp @@ -0,0 +1,178 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include "openvino/opsets/opset5.hpp" +#include "openvino/opsets/opset4.hpp" +#include "openvino/opsets/opset3.hpp" +#include "openvino/opsets/opset1.hpp" +#include "base_reference_test.hpp" + +using namespace reference_tests; +using namespace ov; + +namespace { +struct ROIAlignParams { + template + ROIAlignParams(const PartialShape& pShape, const element::Type& iType, const std::vector& iValues, + const reference_tests::Tensor& expectedFeatureMap, + const reference_tests::Tensor& coords, const reference_tests::Tensor& roiIdx, + const int32_t pooledH, const int32_t pooledW, + const float spatialScale, const int32_t poolingRatio, const std::string& poolingMode, + const std::string& testcaseName) + : 
pShape(pShape), + iType(iType), + featureMap(CreateTensor(iType, iValues)), + expectedFeatureMap(expectedFeatureMap), + coords(coords), + roiIdx(roiIdx), + pooledH(pooledH), + pooledW(pooledW), + spatialScale(spatialScale), + poolingRatio(poolingRatio), + poolingMode(poolingMode), + testcaseName(testcaseName) {} + + PartialShape pShape; + element::Type iType; + ov::Tensor featureMap; + reference_tests::Tensor expectedFeatureMap; + reference_tests::Tensor coords; + reference_tests::Tensor roiIdx; + int32_t pooledH; + int32_t pooledW; + float spatialScale; + int32_t poolingRatio; + std::string poolingMode; + std::string testcaseName; +}; + +class ReferenceROIAlignTest : public testing::TestWithParam, public CommonReferenceTest { +public: + void SetUp() override { + auto params = GetParam(); + function = CreateFunction(params); + inputData = {params.featureMap}; + refOutData = {params.expectedFeatureMap.data}; + } + + static std::string getTestCaseName(const testing::TestParamInfo& obj) { + auto param = obj.param; + std::ostringstream result; + result << "iType=" << param.iType; + result << "_pShape=" << param.pShape; + result << "_efType=" << param.expectedFeatureMap.type; + result << "_efShape=" << param.expectedFeatureMap.shape; + result << "_cType=" << param.coords.type; + result << "_cShape=" << param.coords.shape; + result << "_rType=" << param.roiIdx.type; + result << "_rShape=" << param.roiIdx.shape; + result << "_pooledH=" << param.pooledH; + result << "_pooledW=" << param.pooledW; + result << "_spatialScale=" << param.spatialScale; + result << "_poolingRatio=" << param.poolingRatio; + result << "_poolingMode=" << param.poolingMode; + if (param.testcaseName != "") { + result << "_=" << param.testcaseName; + } + return result.str(); + } + +private: + static std::shared_ptr CreateFunction(const ROIAlignParams& params) { + const auto featureMap = std::make_shared(params.iType, params.pShape); + const auto coords = std::make_shared(params.coords.type, 
params.coords.shape, params.coords.data.data()); + const auto roisIdx = std::make_shared(params.roiIdx.type, params.roiIdx.shape, params.roiIdx.data.data()); + const auto roi_align = std::make_shared(featureMap, + coords, + roisIdx, + params.pooledH, + params.pooledW, + params.poolingRatio, + params.spatialScale, + params.poolingMode); + auto f = std::make_shared(NodeVector{roi_align}, ParameterVector{featureMap}); + return f; + } +}; + +TEST_P(ReferenceROIAlignTest, CompareWithRefs) { + Exec(); +} + +template +std::vector generateParams() { + using T = typename element_type_traits::value_type; + using T_IND = typename element_type_traits::value_type; + std::vector params { + ROIAlignParams(PartialShape{2, 1, 8, 8}, ET, + std::vector{0, 1, 8, 5, 5, 2, 0, 7, 7, 10, 4, 5, 9, 0, 0, 5, + 7, 0, 4, 0, 4, 7, 6, 10, 9, 5, 1, 7, 4, 7, 10, 8, + 2, 0, 8, 3, 6, 8, 10, 4, 2, 10, 7, 8, 7, 0, 6, 9, + 2, 4, 8, 5, 2, 3, 3, 1, 5, 9, 10, 0, 9, 5, 5, 3, + 10, 5, 2, 0, 10, 0, 5, 4, 3, 10, 5, 5, 10, 0, 8, 8, + 9, 1, 0, 7, 9, 6, 8, 7, 10, 9, 2, 3, 3, 5, 6, 9, + 4, 9, 2, 4, 5, 5, 3, 1, 1, 6, 8, 0, 5, 5, 10, 8, + 6, 9, 6, 9, 1, 2, 7, 1, 1, 3, 0, 4, 0, 7, 10, 2}, + reference_tests::Tensor(ET, {2, 1, 2, 2}, std::vector{3, 3.75, 4.75, 5, 3, 5.5, 2.75, 3.75}), + reference_tests::Tensor(ET, {2, 4}, std::vector{2, 2, 4, 4, 2, 2, 4, 4}), + reference_tests::Tensor(ET_IND, {2}, std::vector{0, 1}), + 2, 2, 1, 2, "avg", "roi_align_avg"), + + ROIAlignParams(PartialShape{2, 1, 8, 8}, ET, + std::vector{0, 1, 8, 5, 5, 2, 0, 7, 7, 10, 4, 5, 9, 0, 0, 5, + 7, 0, 4, 0, 4, 7, 6, 10, 9, 5, 1, 7, 4, 7, 10, 8, + 2, 0, 8, 3, 6, 8, 10, 4, 2, 10, 7, 8, 7, 0, 6, 9, + 2, 4, 8, 5, 2, 3, 3, 1, 5, 9, 10, 0, 9, 5, 5, 3, + 10, 5, 2, 0, 10, 0, 5, 4, 3, 10, 5, 5, 10, 0, 8, 8, + 9, 1, 0, 7, 9, 6, 8, 7, 10, 9, 2, 3, 3, 5, 6, 9, + 4, 9, 2, 4, 5, 5, 3, 1, 1, 6, 8, 0, 5, 5, 10, 8, + 6, 9, 6, 9, 1, 2, 7, 1, 1, 3, 0, 4, 0, 7, 10, 2}, + reference_tests::Tensor(ET, {2, 1, 2, 2}, std::vector{4.375, 4.9375, 5.6875, 5.625, 4.625, 
7.125, 3.3125, 4.3125}), + reference_tests::Tensor(ET, {2, 4}, std::vector{2, 2, 4, 4, 2, 2, 4, 4}), + reference_tests::Tensor(ET_IND, {2}, std::vector{0, 1}), + 2, 2, 1, 2, "max", "roi_align_max"), + }; + return params; +} + +std::vector generateCombinedParams() { + const std::vector> generatedParams { + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + generateParams(), + }; + std::vector combinedParams; + + for (const auto& params : generatedParams) { + combinedParams.insert(combinedParams.end(), params.begin(), params.end()); + } + return combinedParams; +} + +INSTANTIATE_TEST_SUITE_P(smoke_ROIAlign_With_Hardcoded_Refs, ReferenceROIAlignTest, + testing::ValuesIn(generateCombinedParams()), ReferenceROIAlignTest::getTestCaseName); +} // namespace diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_executable_network/properties.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_executable_network/properties.cpp index 8a7e41278f8..10ba97613de 100644 --- a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_executable_network/properties.cpp +++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_executable_network/properties.cpp @@ -29,6 +29,8 @@ const std::vector auto_inproperties = { const std::vector auto_batch_inproperties = { {ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}, + {{CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG) , std::string(CommonTestUtils::DEVICE_TEMPLATE) + "(4)"}, + {ov::auto_batch_timeout(-1)}}, }; INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, 
OVCompiledModelPropertiesIncorrectTests, @@ -89,9 +91,9 @@ const std::vector multi_properties = { }; const std::vector auto_batch_properties = { - {{CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG) , CommonTestUtils::DEVICE_TEMPLATE}}, - {{CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG) , CommonTestUtils::DEVICE_TEMPLATE}, - {CONFIG_KEY(AUTO_BATCH_TIMEOUT) , "1"}}, + {{CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG) , std::string(CommonTestUtils::DEVICE_TEMPLATE) + "(4)"}}, + {{CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG) , std::string(CommonTestUtils::DEVICE_TEMPLATE) + "(4)"}, {CONFIG_KEY(AUTO_BATCH_TIMEOUT) , "1"}}, + {{CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG) , std::string(CommonTestUtils::DEVICE_TEMPLATE) + "(4)"}, {ov::auto_batch_timeout(10)}}, }; INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, OVCompiledModelPropertiesTests, diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_plugin/caching_tests.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_plugin/caching_tests.cpp new file mode 100644 index 00000000000..636631e3e19 --- /dev/null +++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_plugin/caching_tests.cpp @@ -0,0 +1,25 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "behavior/ov_plugin/caching_tests.hpp" + +using namespace ov::test::behavior; + +namespace { + static const std::vector precisionsTemplate = { + ov::element::f32, + }; + + static const std::vector batchSizesTemplate = { + 1, 2 + }; + + INSTANTIATE_TEST_SUITE_P(smoke_Behavior_CachingSupportCase_Template, CompileModelCacheTestBase, + ::testing::Combine( + ::testing::ValuesIn(CompileModelCacheTestBase::getStandardFunctions()), + ::testing::ValuesIn(precisionsTemplate), + ::testing::ValuesIn(batchSizesTemplate), + ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)), + CompileModelCacheTestBase::getTestCaseName); +} // namespace diff --git 
a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_plugin/properties_tests.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_plugin/properties_tests.cpp new file mode 100644 index 00000000000..d6a87759e01 --- /dev/null +++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/ov_plugin/properties_tests.cpp @@ -0,0 +1,120 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "behavior/ov_plugin/properties_tests.hpp" +#include "openvino/runtime/properties.hpp" + +using namespace ov::test::behavior; + +namespace { + +const std::vector inproperties = { + {ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}, +}; + +const std::vector hetero_inproperties = { + {ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}, +}; + +const std::vector multi_inproperties = { + {ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}, +}; + + +const std::vector auto_inproperties = { + {ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}, +}; + + +const std::vector auto_batch_inproperties = { + {ov::device::id("UNSUPPORTED_DEVICE_ID_STRING")}, +}; + +INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_BehaviorTests, OVPropertiesIncorrectTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE), + ::testing::ValuesIn(inproperties)), + OVPropertiesIncorrectTests::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_Hetero_BehaviorTests, OVPropertiesIncorrectTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_HETERO), + ::testing::ValuesIn(hetero_inproperties)), + OVPropertiesIncorrectTests::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_Multi_BehaviorTests, OVPropertiesIncorrectTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_MULTI), + ::testing::ValuesIn(multi_inproperties)), + OVPropertiesIncorrectTests::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_Auto_BehaviorTests, 
OVPropertiesIncorrectTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_AUTO), + ::testing::ValuesIn(auto_inproperties)), + OVPropertiesIncorrectTests::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_AutoBatch_BehaviorTests, OVPropertiesIncorrectTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_BATCH), + ::testing::ValuesIn(auto_batch_inproperties)), + OVPropertiesIncorrectTests::getTestCaseName); + +const std::vector default_properties = { + {ov::enable_profiling(true)}, + {ov::device::id(0)}, +}; + +INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, OVPropertiesDefaultTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE), + ::testing::ValuesIn(default_properties)), + OVPropertiesDefaultTests::getTestCaseName); + +const std::vector properties = { + {ov::enable_profiling(true)}, + {ov::device::id(0)}, +}; + +const std::vector hetero_properties = { + {ov::device::priorities(CommonTestUtils::DEVICE_TEMPLATE), ov::enable_profiling(true)}, + {ov::device::priorities(CommonTestUtils::DEVICE_TEMPLATE), ov::device::id(0)}, +}; + + +const std::vector multi_properties = { + {ov::device::priorities(CommonTestUtils::DEVICE_TEMPLATE), ov::enable_profiling(true)}, + {ov::device::priorities(CommonTestUtils::DEVICE_TEMPLATE), ov::device::id(0)}, +}; + +const std::vector auto_batch_properties = { + {{CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG) , CommonTestUtils::DEVICE_TEMPLATE}}, + {{CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG) , CommonTestUtils::DEVICE_TEMPLATE}, + {CONFIG_KEY(AUTO_BATCH_TIMEOUT) , "1"}}, +}; + +INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, OVPropertiesTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE), + ::testing::ValuesIn(properties)), + OVPropertiesTests::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_Hetero_BehaviorTests, OVPropertiesTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_HETERO), + 
::testing::ValuesIn(hetero_properties)), + OVPropertiesTests::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_Multi_BehaviorTests, OVPropertiesTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_MULTI), + ::testing::ValuesIn(multi_properties)), + OVPropertiesTests::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_AutoBatch_BehaviorTests, OVPropertiesTests, + ::testing::Combine( + ::testing::Values(CommonTestUtils::DEVICE_BATCH), + ::testing::ValuesIn(auto_batch_properties)), + OVPropertiesTests::getTestCaseName); +} // namespace diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/caching/caching_tests.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/caching_tests.cpp similarity index 95% rename from docs/template_plugin/tests/functional/shared_tests_instances/behavior/caching/caching_tests.cpp rename to docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/caching_tests.cpp index 5e0a9d72935..700663bb5a0 100644 --- a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/caching/caching_tests.cpp +++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/caching_tests.cpp @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "behavior/caching/caching_tests.hpp" +#include "behavior/plugin/caching_tests.hpp" using namespace LayerTestsDefinitions; diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/preprocessing/preprocessing.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/preprocessing.cpp similarity index 97% rename from docs/template_plugin/tests/functional/shared_tests_instances/behavior/preprocessing/preprocessing.cpp rename to docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/preprocessing.cpp index bfdaf270353..af612c11697 100644 --- 
a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/preprocessing/preprocessing.cpp +++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/preprocessing.cpp @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "behavior/preprocessing/preprocessing.hpp" +#include "behavior/plugin/preprocessing.hpp" #ifdef ENABLE_GAPI_PREPROCESSING diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/preprocessing/set_preprocess.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/set_preprocess.cpp similarity index 98% rename from docs/template_plugin/tests/functional/shared_tests_instances/behavior/preprocessing/set_preprocess.cpp rename to docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/set_preprocess.cpp index 5e516a8da35..ffc55943b0b 100644 --- a/docs/template_plugin/tests/functional/shared_tests_instances/behavior/preprocessing/set_preprocess.cpp +++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/set_preprocess.cpp @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "behavior/preprocessing/set_preprocess.hpp" +#include "behavior/plugin/set_preprocess.hpp" #ifdef ENABLE_GAPI_PREPROCESSING diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/hetero/synthetic.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/synthetic.cpp similarity index 99% rename from docs/template_plugin/tests/functional/shared_tests_instances/hetero/synthetic.cpp rename to docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/synthetic.cpp index dfadd8f4a84..e91afe05441 100644 --- a/docs/template_plugin/tests/functional/shared_tests_instances/hetero/synthetic.cpp +++ b/docs/template_plugin/tests/functional/shared_tests_instances/behavior/plugin/synthetic.cpp @@ -4,7 +4,7 @@ #include -#include "hetero/synthetic.hpp" +#include 
"behavior/plugin/hetero_synthetic.hpp" #include "ngraph_functions/builders.hpp" #include "ngraph_functions/subgraph_builders.hpp" diff --git a/docs/template_plugin/tests/functional/subgraph_reference/preprocess.cpp b/docs/template_plugin/tests/functional/subgraph_reference/preprocess.cpp index 4b136d4dfa9..97b1f2b3487 100644 --- a/docs/template_plugin/tests/functional/subgraph_reference/preprocess.cpp +++ b/docs/template_plugin/tests/functional/subgraph_reference/preprocess.cpp @@ -847,10 +847,10 @@ static RefPreprocessParams set_shape_custom_crop() { p.build(); return f; }; - auto input_size = 4 * 4 * 4 * 4; - std::vector input_values(input_size); + auto input_shape = Shape{4, 4, 4, 4}; + std::vector input_values(shape_size(input_shape)); std::iota(input_values.begin(), input_values.end(), 0); - res.inputs.emplace_back(element::f32, Shape{4, 4, 4, 4}, input_values); + res.inputs.emplace_back(element::f32, input_shape, input_values); res.expected.emplace_back(Shape{2, 2, 2, 2}, element::f32, std::vector{ 85, 86, 89, 90, 101, 102, 105, 106, 149, 150, 153, 154, @@ -880,6 +880,45 @@ static RefPreprocessParams set_shape_with_resize() { return res; } +static RefPreprocessParams preprocess_crop_basic() { + RefPreprocessParams res("preprocess_crop_basic"); + res.function = []() { + auto f = create_simple_function(element::f32, PartialShape{2, 2, 2, 2}); + auto p = PrePostProcessor(f); + p.input().tensor().set_shape({4, 4, 4, 4}); + p.input().preprocess().crop({1, 1, 1, 1}, {-1, -1, -1, -1}); + p.build(); + return f; + }; + auto input_shape = Shape{4, 4, 4, 4}; + std::vector input_values(shape_size(input_shape)); + std::iota(input_values.begin(), input_values.end(), 0); + res.inputs.emplace_back(element::f32, input_shape, input_values); + res.expected.emplace_back(Shape{2, 2, 2, 2}, element::f32, std::vector{ 85, 86, 89, 90, + 101, 102, 105, 106, + 149, 150, 153, 154, + 165, 166, 169, 170}); + return res; +} + +static RefPreprocessParams preprocess_crop_2axis_dynamic() { + 
RefPreprocessParams res("preprocess_crop_2axis_dynamic"); + res.function = []() { + auto f = create_simple_function(element::f32, PartialShape::dynamic()); + auto p = PrePostProcessor(f); + auto max_int = std::numeric_limits::max(); + p.input().preprocess().crop({0, 0, 1, 1}, {max_int, max_int, max_int, max_int}); + p.build(); + return f; + }; + auto input_shape = Shape{1, 3, 2, 2}; + std::vector input_values(shape_size(input_shape)); + std::iota(input_values.begin(), input_values.end(), 0); + res.inputs.emplace_back(element::f32, Shape{1, 3, 2, 2}, input_values); + res.expected.emplace_back(Shape{1, 3, 1, 1}, element::f32, std::vector{3, 7, 11}); + return res; +} + static RefPreprocessParams postprocess_2_inputs_basic() { RefPreprocessParams res("postprocess_2_inputs_basic"); res.function = []() { @@ -1154,6 +1193,8 @@ std::vector allPreprocessTests() { element_type_before_convert_color_nv12(), convert_color_i420_to_bgr_three_planes(), convert_color_i420_single_plane(), + preprocess_crop_basic(), + preprocess_crop_2axis_dynamic(), set_shape_custom_crop(), set_shape_with_resize(), postprocess_2_inputs_basic(), diff --git a/docs/tutorials.md b/docs/tutorials.md index 9fb3df811cb..6538477f515 100644 --- a/docs/tutorials.md +++ b/docs/tutorials.md @@ -15,6 +15,15 @@ This collection of Python tutorials are written for running on [Jupyter*](https://jupyter.org) notebooks. The tutorials provide an introduction to the OpenVINO™ toolkit and explain how to use the Python API and tools for optimized deep learning inference. You can run the code one section at a time to see how to integrate your application with OpenVINO™ libraries. -![Binder logo](img/badge_logo.svg) +@sphinxdirective + +|binder_link| + +.. |binder_link| raw:: html + + Binder + +@endsphinxdirective + Tutorials showing this logo may be run remotely using Binder with no setup, although running the notebooks on a local system is recommended for best performance. 
See the [OpenVINO™ Notebooks Installation Guide](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/README.md#-installation-guide) to install and run locally. diff --git a/licensing/onednn_third-party-programs.txt b/licensing/onednn_third-party-programs.txt index 0fed7990633..3897a99f34c 100644 --- a/licensing/onednn_third-party-programs.txt +++ b/licensing/onednn_third-party-programs.txt @@ -6,7 +6,7 @@ terms. This third party software, even if included with the distribution of the Intel software, may be governed by separate license terms, including without limitation, third party license terms, other Intel software license terms, and open source software license terms. These separate license terms -govern your use of the third party programs as set forth in in the +govern your use of the third party programs as set forth in the "THIRD-PARTY-PROGRAMS" file. Third party programs and their corresponding required notices and/or license @@ -554,4 +554,4 @@ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/samples/c/common/opencv_c_wrapper/bmp_reader.c b/samples/c/common/opencv_c_wrapper/bmp_reader.c index 848415d204a..564d26c1ee8 100644 --- a/samples/c/common/opencv_c_wrapper/bmp_reader.c +++ b/samples/c/common/opencv_c_wrapper/bmp_reader.c @@ -4,11 +4,11 @@ #include #include -#define CLEANUP_AND_RETURN(x) \ - if (x && !image && !image->data) \ - free(image->data); \ - if (input != NULL) \ - fclose(input); \ +#define CLEANUP_AND_RETURN(x) \ + if (0 != x && NULL != image && NULL != image->data) \ + free(image->data); \ + if (input != NULL) \ + fclose(input); \ return x; int readBmpImage(const char* fileName, BitMap* image) { @@ -29,7 +29,7 @@ int readBmpImage(const char* fileName, BitMap* image) { } cnt = fread(&image->header.type, sizeof(image->header.type), sizeof(unsigned char), input); - if (cnt != sizeof(image->header.type)) { + if (cnt != sizeof(unsigned char)) { printf("[BMP] file read error\n"); CLEANUP_AND_RETURN(-2); } @@ -40,25 +40,25 @@ int readBmpImage(const char* fileName, BitMap* image) { } cnt = fread(&image->header.size, sizeof(image->header.size), sizeof(unsigned char), input); - if (cnt != sizeof(image->header.size)) { + if (cnt != sizeof(unsigned char)) { printf("[BMP] file read error\n"); CLEANUP_AND_RETURN(2); } cnt = fread(&image->header.reserved, sizeof(image->header.reserved), sizeof(unsigned char), input); - if (cnt != sizeof(image->header.reserved)) { + if (cnt != sizeof(unsigned char)) { printf("[BMP] file read error\n"); CLEANUP_AND_RETURN(2); } cnt = fread(&image->header.offset, sizeof(image->header.offset), sizeof(unsigned char), input); - if (cnt != sizeof(image->header.offset)) { + if (cnt != sizeof(unsigned char)) { printf("[BMP] file read error\n"); CLEANUP_AND_RETURN(2); } cnt = fread(&image->infoHeader, sizeof(BmpInfoHeader), sizeof(unsigned char), input); - if (cnt != sizeof(image->header.offset)) { + if (cnt != sizeof(unsigned char)) { printf("[BMP] file read error\n"); CLEANUP_AND_RETURN(2); } @@ -98,13 +98,12 @@ 
int readBmpImage(const char* fileName, BitMap* image) { for (i = 0; i < image_height; i++) { unsigned int storeAt = image->infoHeader.height < 0 ? i : (unsigned int)image_height - 1 - i; cnt = fread(image->data + row_size * storeAt, row_size, sizeof(unsigned char), input); - if (cnt != row_size) { + if (cnt != sizeof(unsigned char)) { printf("[BMP] file read error\n"); CLEANUP_AND_RETURN(2); } - cnt = fread(pad, padSize, sizeof(unsigned char), input); - if (cnt != padSize) { + if ((padSize != 0 && cnt != 0) && (cnt != sizeof(unsigned char))) { printf("[BMP] file read error\n"); CLEANUP_AND_RETURN(2); } diff --git a/samples/c/hello_classification/README.md b/samples/c/hello_classification/README.md index f9daa132d69..a77c10f5f5f 100644 --- a/samples/c/hello_classification/README.md +++ b/samples/c/hello_classification/README.md @@ -26,7 +26,7 @@ Upon the start-up, the sample application reads command line parameters, loads s Then, the sample creates an synchronous inference request object. When inference is done, the application outputs data to the standard output stream. You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the Inference Engine with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. 
## Building @@ -92,8 +92,8 @@ This sample is an API example, for any performance measurements please use the d ## See Also -- [Integrate the Inference Engine with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) -- [Using Inference Engine Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) +- [Integrate OpenVINO™ into Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) +- [Using OpenVINO™ Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/c/hello_nv12_input_classification/README.md b/samples/c/hello_nv12_input_classification/README.md index a3879a129d0..da71ed74e7f 100644 --- a/samples/c/hello_nv12_input_classification/README.md +++ b/samples/c/hello_nv12_input_classification/README.md @@ -25,7 +25,7 @@ image in the NV12 color format to an Inference Engine plugin. Then, the sample c application outputs data to the standard output stream. You can see the explicit description of -each sample step at [Integration Steps](https://docs.openvino.ai/latest/openvino_docs_IE_DG_Integrate_with_customer_application_new_API.html) section of "Integrate the Inference Engine with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. 
## Building @@ -107,8 +107,8 @@ This sample is an API example, for any performance measurements please use the d ## See Also -- [Integrate the Inference Engine with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) -- [Using Inference Engine Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) +- [Integrate the OpenVINO™ into Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) +- [Using OpenVINO™ Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/c/hello_nv12_input_classification/main.c b/samples/c/hello_nv12_input_classification/main.c index 3ac5d99414d..1f682bfe92d 100644 --- a/samples/c/hello_nv12_input_classification/main.c +++ b/samples/c/hello_nv12_input_classification/main.c @@ -104,7 +104,7 @@ void print_classify_res(struct classify_res* cls, size_t n, const char* img_path * doesn't equal to size param */ size_t read_image_from_file(const char* img_path, unsigned char* img_data, size_t size) { - FILE* fp = fopen(img_path, "rb+"); + FILE* fp = fopen(img_path, "rb"); size_t read_size = 0; if (fp) { diff --git a/samples/cpp/benchmark_app/README.md b/samples/cpp/benchmark_app/README.md index 08f7491fbb8..11a3239a362 100644 --- a/samples/cpp/benchmark_app/README.md +++ b/samples/cpp/benchmark_app/README.md @@ -56,7 +56,7 @@ Note that the benchmark_app usually produces optimal performance for any device But it is still may be sub-optimal for some cases, especially for very small networks. More details can read in [Performance Optimization Guide](../../../docs/optimization_guide/dldt_optimization_guide.md). 
-As explained in the [Performance Optimization Guide](../../../docs/optimization_guide/dldt_optimization_guide.md) section, for all devices, including new [MULTI device](../../../docs/OV_Runtime_UG/supported_plugins/MULTI.md) it is preferable to use the FP16 IR for the model. +As explained in the [Performance Optimization Guide](../../../docs/optimization_guide/dldt_optimization_guide.md) section, for all devices, including new [MULTI device](../../../docs/OV_Runtime_UG/multi_device.md) it is preferable to use the FP16 IR for the model. Also if latency of the CPU inference on the multi-socket machines is of concern, please refer to the same [Performance Optimization Guide](../../../docs/optimization_guide/dldt_optimization_guide.md). @@ -166,7 +166,7 @@ This section provides step-by-step instructions on how to run the Benchmark Tool > **NOTE**: The Internet access is required to execute the following steps successfully. If you have access to the Internet through the proxy server only, please make sure that it is configured in your OS environment. -1. Download the model. Go to the the Model Downloader directory and run the `downloader.py` script with specifying the model name and directory to download the model to: +1. Download the model. 
Go to the Model Downloader directory and run the `downloader.py` script with specifying the model name and directory to download the model to: ```sh cd /extras/open_model_zoo/tools/downloader ``` diff --git a/samples/cpp/benchmark_app/main.cpp b/samples/cpp/benchmark_app/main.cpp index 6c2326adb35..a609d951df4 100644 --- a/samples/cpp/benchmark_app/main.cpp +++ b/samples/cpp/benchmark_app/main.cpp @@ -32,7 +32,7 @@ static const size_t progressBarDefaultTotalCount = 1000; -bool ParseAndCheckCommandLine(int argc, char* argv[]) { +bool parse_and_check_command_line(int argc, char* argv[]) { // ---------------------------Parsing and validating input // arguments-------------------------------------- slog::info << "Parsing input parameters" << slog::endl; @@ -124,10 +124,12 @@ ov::hint::PerformanceMode get_performance_hint(const std::string& device, const ov_perf_hint = ov::hint::PerformanceMode::UNDEFINED; } } else { - slog::warn << "PerformanceMode was not explicitly specified in command line. " + ov_perf_hint = + FLAGS_api == "sync" ? ov::hint::PerformanceMode::LATENCY : ov::hint::PerformanceMode::THROUGHPUT; + + slog::warn << "Performance hint was not explicitly specified in command line. " "Device(" - << device << ") performance hint will be set to THROUGHPUT." << slog::endl; - ov_perf_hint = ov::hint::PerformanceMode::THROUGHPUT; + << device << ") performance hint will be set to " << ov_perf_hint << "." 
<< slog::endl; } } else { if (FLAGS_hint != "") { @@ -149,7 +151,7 @@ int main(int argc, char* argv[]) { // ------------------------------------------------- next_step(); - if (!ParseAndCheckCommandLine(argc, argv)) { + if (!parse_and_check_command_line(argc, argv)) { return 0; } @@ -208,9 +210,9 @@ int main(int argc, char* argv[]) { ov::Core core; if (FLAGS_d.find("CPU") != std::string::npos && !FLAGS_l.empty()) { - // CPU (MKLDNN) extensions is loaded as a shared library + // CPU plugin extensions is loaded as a shared library core.add_extension(FLAGS_l); - slog::info << "CPU (MKLDNN) extensions is loaded " << FLAGS_l << slog::endl; + slog::info << "CPU plugin extensions is loaded " << FLAGS_l << slog::endl; } // Load clDNN Extensions @@ -390,10 +392,10 @@ int main(int argc, char* argv[]) { if ((device_name.find("MULTI") != std::string::npos) && (device_name.find("CPU") != std::string::npos)) { - slog::warn << "Turn on GPU throttling. Multi-device execution with " + slog::warn << "GPU throttling is turned on. Multi-device execution with " "the CPU + GPU performs best with GPU throttling hint, " << "which releases another CPU thread (that is otherwise " - "used by the GPU driver for active polling)" + "used by the GPU driver for active polling)." 
<< slog::endl; device_config[GPU_CONFIG_KEY(PLUGIN_THROTTLE)] = "1"; } @@ -787,8 +789,11 @@ int main(int argc, char* argv[]) { std::map inputsData; if (isFlagSetInCommandLine("use_device_mem")) { if (device_name.find("GPU") == 0) { - inputsData = - ::gpu::get_remote_input_tensors(inputFiles, app_inputs_info, compiledModel, clInputsBuffer); + inputsData = ::gpu::get_remote_input_tensors(inputFiles, + app_inputs_info, + compiledModel, + clInputsBuffer, + inferRequestsQueue.requests.size()); useGpuMem = true; } else if (device_name.find("CPU") == 0) { if (newInputType) { diff --git a/samples/cpp/benchmark_app/remote_tensors_filling.cpp b/samples/cpp/benchmark_app/remote_tensors_filling.cpp index 0f2065c2979..40bc581d153 100644 --- a/samples/cpp/benchmark_app/remote_tensors_filling.cpp +++ b/samples/cpp/benchmark_app/remote_tensors_filling.cpp @@ -69,7 +69,8 @@ std::map get_remote_input_tensors( const std::map>& inputFiles, const std::vector& app_inputs_info, const ov::CompiledModel& compiledModel, - std::vector& clBuffer) { + std::vector& clBuffer, + size_t num_requests) { #ifdef HAVE_DEVICE_MEM_SUPPORT slog::info << "Device memory will be used for input and output blobs" << slog::endl; if (inputFiles.size()) { @@ -82,43 +83,45 @@ std::map get_remote_input_tensors( auto& oclContext = static_cast(context); auto oclInstance = std::make_shared(oclContext.get()); - for (auto& inputs_info : app_inputs_info) { - for (auto& input : inputs_info) { - // Fill random - slog::info << "Prepare remote blob for input '" << input.first << "' with random values (" - << std::string((input.second.is_image() ? "image" : "some binary data")) << " is expected)" - << slog::endl; + for (int i = 0; i < num_requests; i++) { + for (auto& inputs_info : app_inputs_info) { + for (auto& input : inputs_info) { + // Fill random + slog::info << "Prepare remote blob for input '" << input.first << "' with random values (" + << std::string((input.second.is_image() ? 
"image" : "some binary data")) << " is expected)" + << slog::endl; - // Creating and filling shared buffers - cl_int err; - auto elementsNum = std::accumulate(begin(input.second.dataShape), - end(input.second.dataShape), - 1, - std::multiplies()); - auto inputSize = elementsNum * input.second.type.bitwidth() / 8; + // Creating and filling shared buffers + cl_int err; + auto elementsNum = std::accumulate(begin(input.second.dataShape), + end(input.second.dataShape), + 1, + std::multiplies()); + auto inputSize = elementsNum * input.second.type.bitwidth() / 8; - clBuffer.push_back( - cl::Buffer(oclInstance->_context, CL_MEM_READ_WRITE, (cl::size_type)inputSize, NULL, &err)); + clBuffer.push_back( + cl::Buffer(oclInstance->_context, CL_MEM_READ_WRITE, (cl::size_type)inputSize, NULL, &err)); - void* mappedPtr = oclInstance->_queue.enqueueMapBuffer(clBuffer.back(), - CL_TRUE, - CL_MEM_READ_WRITE, - 0, - (cl::size_type)inputSize); + void* mappedPtr = oclInstance->_queue.enqueueMapBuffer(clBuffer.back(), + CL_TRUE, + CL_MEM_READ_WRITE, + 0, + (cl::size_type)inputSize); - auto tensor = oclContext.create_tensor(input.second.type, input.second.dataShape, clBuffer.back().get()); - remoteTensors[input.first].push_back(tensor); + auto tensor = + oclContext.create_tensor(input.second.type, input.second.dataShape, clBuffer.back().get()); + remoteTensors[input.first].push_back(tensor); - if (inputFiles.empty()) { - // Filling in random data - fill_buffer(mappedPtr, elementsNum, input.second.type); - } else { - // TODO: add filling with real image data + if (inputFiles.empty()) { + // Filling in random data + fill_buffer(mappedPtr, elementsNum, input.second.type); + } else { + // TODO: add filling with real image data + } + oclInstance->_queue.enqueueUnmapMemObject(clBuffer.back(), mappedPtr); } - oclInstance->_queue.enqueueUnmapMemObject(clBuffer.back(), mappedPtr); } } - return remoteTensors; #else IE_THROW() << "Device memory requested for GPU device, but OpenCL was not linked"; 
diff --git a/samples/cpp/benchmark_app/remote_tensors_filling.hpp b/samples/cpp/benchmark_app/remote_tensors_filling.hpp index 7cb919f565f..4e8555b844e 100644 --- a/samples/cpp/benchmark_app/remote_tensors_filling.hpp +++ b/samples/cpp/benchmark_app/remote_tensors_filling.hpp @@ -61,7 +61,8 @@ std::map get_remote_input_tensors( const std::map>& inputFiles, const std::vector& app_inputs_info, const ov::CompiledModel& compiledModel, - std::vector& clBuffer); + std::vector& clBuffer, + size_t num_requests); std::map get_remote_output_tensors(const ov::CompiledModel& compiledModel, std::map& clBuffer); diff --git a/samples/cpp/benchmark_app/utils.cpp b/samples/cpp/benchmark_app/utils.cpp index b9384303859..c3493642fc5 100644 --- a/samples/cpp/benchmark_app/utils.cpp +++ b/samples/cpp/benchmark_app/utils.cpp @@ -451,6 +451,7 @@ std::vector get_inputs_info(const std::string& shape_ for (size_t i = 0; i < min_size; ++i) { benchmark_app::InputsInfo info_map; + bool is_there_at_least_one_batch_dim = false; for (auto& item : input_info) { benchmark_app::InputInfo info; auto name = item.get_any_name(); @@ -602,6 +603,7 @@ std::vector get_inputs_info(const std::string& shape_ } info.dataShape[batch_index] = batch_size; reshape_required = true; + is_there_at_least_one_batch_dim = true; } } else { slog::warn << "Input '" << item.get_any_name() @@ -612,6 +614,12 @@ std::vector get_inputs_info(const std::string& shape_ info_map[name] = info; } + if (batch_size > 1 && !is_there_at_least_one_batch_dim) { + throw std::runtime_error("-b option is provided in command line, but there's no inputs with batch(B) " + "dimension in input layout, so batch cannot be set. 
" + "You may specify layout explicitly using -layout option."); + } + // Update scale and mean std::map> scale_map = parse_scale_or_mean(scale_string, info_map); std::map> mean_map = parse_scale_or_mean(mean_string, info_map); @@ -813,7 +821,7 @@ std::string parameter_name_to_tensor_name(const std::string& name, const std::vector>& outputs_info) { if (std::any_of(inputs_info.begin(), inputs_info.end(), [name](const ov::Output& port) { try { - return name == port.get_any_name(); + return port.get_names().count(name) > 0; } catch (const ov::Exception&) { return false; // Some ports might have no names - so this is workaround } @@ -821,7 +829,7 @@ std::string parameter_name_to_tensor_name(const std::string& name, return name; } else if (std::any_of(outputs_info.begin(), outputs_info.end(), [name](const ov::Output& port) { try { - return name == port.get_any_name(); + return port.get_names().count(name) > 0; } catch (const ov::Exception&) { return false; // Some ports might have no names - so this is workaround } diff --git a/samples/cpp/classification_sample_async/README.md b/samples/cpp/classification_sample_async/README.md index c9f601cc4c8..fbddfc4df2b 100644 --- a/samples/cpp/classification_sample_async/README.md +++ b/samples/cpp/classification_sample_async/README.md @@ -36,7 +36,7 @@ After that, the application starts inference for the first infer request and wai When inference is done, the application outputs data to the standard output stream. You can place labels in .labels file near the model to get pretty output. You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. 
## Building @@ -172,7 +172,7 @@ classid probability ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/cpp/common/utils/include/samples/args_helper.hpp b/samples/cpp/common/utils/include/samples/args_helper.hpp index a3749cbf065..04188e88a5c 100644 --- a/samples/cpp/common/utils/include/samples/args_helper.hpp +++ b/samples/cpp/common/utils/include/samples/args_helper.hpp @@ -45,5 +45,67 @@ void configurePrePostProcessing(std::shared_ptr& function, const std::string& ioml); void printInputAndOutputsInfo(const ov::Model& network); -void printInputAndOutputsInfoShort(const ov::Model& network); -ov::element::Type getPrecision2(const std::string& value); \ No newline at end of file +ov::element::Type getPrecision2(const std::string& value); + +template +void printInputAndOutputsInfoShort(const T& network) { + std::cout << "Network inputs:" << std::endl; + for (auto&& input : network.inputs()) { + std::string in_name; + std::string node_name; + + // Workaround for "tensor has no name" issue + try { + for (const auto& name : input.get_names()) { + in_name += name + " , "; + } + in_name = in_name.substr(0, in_name.size() - 3); + + } catch (const ov::Exception&) { + } + try { + node_name = input.get_node()->get_friendly_name(); + } catch (const ov::Exception&) { + } + + if (in_name == "") { + in_name = "***NO_NAME***"; + } + if (node_name == "") { + node_name = "***NO_NAME***"; + } + + std::cout << " " << in_name << " (node: " << node_name << ") : " << input.get_element_type() << " / " + << 
ov::layout::get_layout(input).to_string() << std::endl; + } + + std::cout << "Network outputs:" << std::endl; + for (auto&& output : network.outputs()) { + std::string out_name; + std::string node_name; + + // Workaround for "tensor has no name" issue + try { + for (const auto& name : output.get_names()) { + out_name += name + " , "; + } + out_name = out_name.substr(0, out_name.size() - 3); + + } catch (const ov::Exception&) { + } + try { + node_name = output.get_node()->get_input_node_ptr(0)->get_friendly_name(); + } catch (const ov::Exception&) { + } + + if (out_name == "") { + out_name = "***NO_NAME***"; + } + if (node_name == "") { + node_name = "***NO_NAME***"; + } + + std::cout << " " << out_name << " (node: " << node_name << ") : " << output.get_element_type() << " / " + << ov::layout::get_layout(output).to_string() << std::endl; + } +} diff --git a/samples/cpp/common/utils/src/args_helper.cpp b/samples/cpp/common/utils/src/args_helper.cpp index 16e6e72060b..4ff1e14128b 100644 --- a/samples/cpp/common/utils/src/args_helper.cpp +++ b/samples/cpp/common/utils/src/args_helper.cpp @@ -248,34 +248,6 @@ bool isMatchLayoutToDims(InferenceEngine::Layout layout, size_t dimension) { } // namespace -void printInputAndOutputsInfoShort(const ov::Model& network) { - std::cout << "Network inputs:" << std::endl; - for (auto&& input : network.inputs()) { - std::cout << " " << input.get_any_name() << " (node: " << input.get_node()->get_friendly_name() - << ") : " << input.get_element_type() << " / " << ov::layout::get_layout(input).to_string() - << std::endl; - } - - std::cout << "Network outputs:" << std::endl; - for (auto&& output : network.outputs()) { - std::string out_name = "***NO_NAME***"; - std::string node_name = "***NO_NAME***"; - - // Workaround for "tensor has no name" issue - try { - out_name = output.get_any_name(); - } catch (const ov::Exception&) { - } - try { - node_name = output.get_node()->get_input_node_ptr(0)->get_friendly_name(); - } catch (const 
ov::Exception&) { - } - - std::cout << " " << out_name << " (node: " << node_name << ") : " << output.get_element_type() << " / " - << ov::layout::get_layout(output).to_string() << std::endl; - } -} - void printInputAndOutputsInfo(const ov::Model& network) { slog::info << "model name: " << network.get_friendly_name() << slog::endl; diff --git a/samples/cpp/hello_classification/README.md b/samples/cpp/hello_classification/README.md index 27af315a355..f0ee0f343b0 100644 --- a/samples/cpp/hello_classification/README.md +++ b/samples/cpp/hello_classification/README.md @@ -26,7 +26,7 @@ The following C++ API is used in the application: At startup, the sample application reads command line parameters, prepares input data, loads a specified model and image to the OpenVINO™ Runtime plugin and performs synchronous inference. Then processes output data and write it to a standard output stream. You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. 
## Building @@ -116,7 +116,7 @@ classid probability ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/cpp/hello_classification/main.cpp b/samples/cpp/hello_classification/main.cpp index 148e02dfdb8..9c797256cbf 100644 --- a/samples/cpp/hello_classification/main.cpp +++ b/samples/cpp/hello_classification/main.cpp @@ -66,7 +66,6 @@ int tmain(int argc, tchar* argv[]) { // just wrap image data by ov::Tensor without allocating of new memory ov::Tensor input_tensor = ov::Tensor(input_type, input_shape, input_data.get()); - const ov::Shape tensor_shape = input_tensor.get_shape(); const ov::Layout tensor_layout{"NHWC"}; // -------- Step 4. 
Configure preprocessing -------- @@ -75,15 +74,9 @@ int tmain(int argc, tchar* argv[]) { // 1) Set input tensor information: // - input() provides information about a single model input - // - precision of tensor is supposed to be 'u8' + // - reuse precision and shape from already available `input_tensor` // - layout of data is 'NHWC' - // - set static spatial dimensions to input tensor to resize from - ppp.input() - .tensor() - .set_element_type(ov::element::u8) - .set_layout(tensor_layout) - .set_spatial_static_shape(tensor_shape[ov::layout::height_idx(tensor_layout)], - tensor_shape[ov::layout::width_idx(tensor_layout)]); + ppp.input().tensor().set_from(input_tensor).set_layout(tensor_layout); // 2) Adding explicit preprocessing steps: // - convert layout to 'NCHW' (from 'NHWC' specified above at tensor layout) // - apply linear resize from tensor spatial dims to model spatial dims @@ -94,7 +87,7 @@ int tmain(int argc, tchar* argv[]) { // - precision of tensor is supposed to be 'f32' ppp.output().tensor().set_element_type(ov::element::f32); - // 6) Apply preprocessing modifing the original 'model' + // 6) Apply preprocessing modifying the original 'model' model = ppp.build(); // -------- Step 5. Loading a model to the device -------- diff --git a/samples/cpp/hello_nv12_input_classification/README.md b/samples/cpp/hello_nv12_input_classification/README.md index 83fd42793a0..c1a78d56893 100644 --- a/samples/cpp/hello_nv12_input_classification/README.md +++ b/samples/cpp/hello_nv12_input_classification/README.md @@ -25,7 +25,7 @@ Basic OpenVINO™ Runtime API is covered by [Hello Classification C++ sample](.. At startup, the sample application reads command line parameters, loads the specified model and an image in the NV12 color format to an OpenVINO™ Runtime plugin. Then, the sample creates an synchronous inference request object. When inference is done, the application outputs data to the standard output stream. 
You can place labels in .labels file near the model to get pretty output. -You can see the explicit description of each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +You can see the explicit description of each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. ## Building @@ -130,7 +130,7 @@ classid probability ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/cpp/hello_query_device/README.md b/samples/cpp/hello_query_device/README.md index 8b9ee2b12d6..202f733542b 100644 --- a/samples/cpp/hello_query_device/README.md +++ b/samples/cpp/hello_query_device/README.md @@ -1,6 +1,6 @@ # Hello Query Device C++ Sample {#openvino_inference_engine_samples_hello_query_device_README} -This sample demonstrates how to execute an query OpenVINO™ Runtime devices, prints their metrics and default configuration values, using [Query Device API feature](../../../docs/OV_Runtime_UG/InferenceEngine_QueryAPI.md). +This sample demonstrates how to execute an query OpenVINO™ Runtime devices, prints their metrics and default configuration values, using [Properties API](../../../docs/OV_Runtime_UG/supported_plugins/config_properties.md). The following C++ API is used in the application: @@ -13,7 +13,7 @@ Basic OpenVINO™ Runtime API is covered by [Hello Classification C++ sample](.. 
| Options | Values | | :--- |:--- | Supported devices | [All](../../../docs/OV_Runtime_UG/supported_plugins/Supported_Devices.md) | -| Other language realization | [Python](../../../samples/python/hello_query_device/README.md) | +| Other language realization | [Python](../../python/hello_query_device/README.md) | ## How It Works @@ -90,5 +90,5 @@ The application prints all available devices with their supported metrics and de ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) diff --git a/samples/cpp/hello_reshape_ssd/README.md b/samples/cpp/hello_reshape_ssd/README.md index ef9d21ba169..cdd0dcfe421 100644 --- a/samples/cpp/hello_reshape_ssd/README.md +++ b/samples/cpp/hello_reshape_ssd/README.md @@ -27,7 +27,7 @@ Upon the start-up the sample application reads command line parameters, loads sp Engine plugin. Then, the sample creates an synchronous inference request object. When inference is done, the application creates output image and output data to the standard output stream. You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. 
## Building @@ -116,7 +116,7 @@ This sample is an API example, for any performance measurements please use the d ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/cpp/hello_reshape_ssd/main.cpp b/samples/cpp/hello_reshape_ssd/main.cpp index 70646b03330..2ae7d457cc2 100644 --- a/samples/cpp/hello_reshape_ssd/main.cpp +++ b/samples/cpp/hello_reshape_ssd/main.cpp @@ -53,7 +53,7 @@ int main(int argc, char* argv[]) { // try to find it. ov::NodeVector ops = model->get_ops(); auto it = std::find_if(ops.begin(), ops.end(), [](std::shared_ptr node) { - return node->get_type_info().name == ngraph::op::DetectionOutput::get_type_info_static().name; + return node->get_type_info() == ngraph::op::DetectionOutput::get_type_info_static(); }); if (it == ops.end()) { throw std::logic_error("model does not contain DetectionOutput layer"); diff --git a/samples/cpp/ngraph_function_creation_sample/CMakeLists.txt b/samples/cpp/model_creation_sample/CMakeLists.txt similarity index 52% rename from samples/cpp/ngraph_function_creation_sample/CMakeLists.txt rename to samples/cpp/model_creation_sample/CMakeLists.txt index c96ea7fae49..5c9d64db6aa 100644 --- a/samples/cpp/ngraph_function_creation_sample/CMakeLists.txt +++ b/samples/cpp/model_creation_sample/CMakeLists.txt @@ -2,9 +2,9 @@ # SPDX-License-Identifier: Apache-2.0 # -set(TARGET_NAME "ngraph_function_creation_sample") +set(TARGET_NAME "model_creation_sample") -ie_add_sample(NAME ngraph_function_creation_sample +ie_add_sample(NAME model_creation_sample SOURCES 
"${CMAKE_CURRENT_SOURCE_DIR}/main.cpp" - HEADERS "${CMAKE_CURRENT_SOURCE_DIR}/ngraph_function_creation_sample.hpp" + HEADERS "${CMAKE_CURRENT_SOURCE_DIR}/model_creation_sample.hpp" DEPENDENCIES format_reader ie_samples_utils) diff --git a/samples/cpp/ngraph_function_creation_sample/README.md b/samples/cpp/model_creation_sample/README.md similarity index 75% rename from samples/cpp/ngraph_function_creation_sample/README.md rename to samples/cpp/model_creation_sample/README.md index 390d522a13b..542d6d82ec0 100644 --- a/samples/cpp/ngraph_function_creation_sample/README.md +++ b/samples/cpp/model_creation_sample/README.md @@ -1,8 +1,8 @@ -# nGraph Function Creation C++ Sample {#openvino_inference_engine_samples_ngraph_function_creation_sample_README} +# Model Creation C++ Sample {#openvino_inference_engine_samples_model_creation_sample_README} This sample demonstrates how to execute an synchronous inference using [model](../../../docs/OV_Runtime_UG/model_representation.md) built on the fly which uses weights from LeNet classification model, which is known to work well on digit classification tasks. -You do not need an XML file to create a model. The API of ngraph::Function allows creating a model on the fly from the source code. +You do not need an XML file to create a model. The API of ov::Model allows creating a model on the fly from the source code. 
The following C++ API is used in the application: @@ -13,7 +13,7 @@ The following C++ API is used in the application: | Tensor Operations | `ov::Tensor::get_byte_size`, `ov::Tensor:data` | Get tensor byte size and its data | | Model Operations | `ov::set_batch` | Operate with model batch size | | Infer Request Operations | `ov::InferRequest::get_input_tensor` | Get a input tensor | -| nGraph Functions | `ov::opset8::Parameter`, `ov::Node::output`, `ov::opset8::Constant`, `ov::opset8::Convolution`, `ov::opset8::Add`, `ov::opset1::MaxPool`, `ov::opset8::Reshape`, `ov::opset8::MatMul`, `ov::opset8::Relu`, `ov::opset8::Softmax`, `ov::descriptor::Tensor::set_names`, `ov::opset8::Result`, `ov::Model`, `ov::ParameterVector::vector` | Used to construct an nGraph function | +| Model creation objects | `ov::opset8::Parameter`, `ov::Node::output`, `ov::opset8::Constant`, `ov::opset8::Convolution`, `ov::opset8::Add`, `ov::opset1::MaxPool`, `ov::opset8::Reshape`, `ov::opset8::MatMul`, `ov::opset8::Relu`, `ov::opset8::Softmax`, `ov::descriptor::Tensor::set_names`, `ov::opset8::Result`, `ov::Model`, `ov::ParameterVector::vector` | Used to construct an OpenVINO model | Basic OpenVINO™ Runtime API is covered by [Hello Classification C++ sample](../hello_classification/README.md). @@ -23,7 +23,7 @@ Basic OpenVINO™ Runtime API is covered by [Hello Classification C++ sample](.. 
| Model Format | model weights file (\*.bin) | | Validated images | single-channel `MNIST ubyte` images | | Supported devices | [All](../../../docs/OV_Runtime_UG/supported_plugins/Supported_Devices.md) | -| Other language realization | [Python](../../../samples/python/ngraph_function_creation_sample/README.md) | +| Other language realization | [Python](../../../samples/python/model_creation_sample/README.md) | ## How It Works @@ -33,7 +33,7 @@ At startup, the sample application does the following: - Loads the model and input data to the OpenVINO™ Runtime plugin - Performs synchronous inference and processes output data, logging each step in a standard output stream -You can see the explicit description of each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +You can see the explicit description of each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. ## Building @@ -42,7 +42,7 @@ To build the sample, please use instructions available at [Build the Sample Appl ## Running ``` -ngraph_function_creation_sample +model_creation_sample ``` > **NOTES**: @@ -56,7 +56,7 @@ ngraph_function_creation_sample You can do inference of an image using a pre-trained model on a GPU using the following command: ``` -ngraph_function_creation_sample lenet.bin GPU +model_creation_sample lenet.bin GPU ``` ## Sample Output @@ -176,12 +176,8 @@ classid probability label -*Starting with the OpenVINO™ toolkit 2020.2 release, all of the features previously available through nGraph have been merged into the OpenVINO™ toolkit. 
As a result, all the features previously available through ONNX RT Execution Provider for nGraph have been merged with ONNX RT Execution Provider for OpenVINO™ toolkit.* - -*Therefore, ONNX RT Execution Provider for nGraph will be deprecated starting June 1, 2020 and will be completely removed on December 1, 2020. Users are recommended to migrate to the ONNX RT Execution Provider for OpenVINO™ toolkit as the unified solution for all AI inferencing on Intel® hardware.* - ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/cpp/ngraph_function_creation_sample/lenet.bin b/samples/cpp/model_creation_sample/lenet.bin similarity index 100% rename from samples/cpp/ngraph_function_creation_sample/lenet.bin rename to samples/cpp/model_creation_sample/lenet.bin diff --git a/samples/cpp/ngraph_function_creation_sample/lenet.labels b/samples/cpp/model_creation_sample/lenet.labels similarity index 100% rename from samples/cpp/ngraph_function_creation_sample/lenet.labels rename to samples/cpp/model_creation_sample/lenet.labels diff --git a/samples/cpp/ngraph_function_creation_sample/main.cpp b/samples/cpp/model_creation_sample/main.cpp similarity index 98% rename from samples/cpp/ngraph_function_creation_sample/main.cpp rename to samples/cpp/model_creation_sample/main.cpp index cb552cdf76c..1ae6db0554c 100644 --- a/samples/cpp/ngraph_function_creation_sample/main.cpp +++ b/samples/cpp/model_creation_sample/main.cpp @@ -21,7 +21,7 @@ #include "samples/classification_results.h" #include "samples/slog.hpp" -#include "ngraph_function_creation_sample.hpp" +#include 
"model_creation_sample.hpp" // clang-format on constexpr auto N_TOP_RESULTS = 1; @@ -214,10 +214,7 @@ std::shared_ptr create_model(const std::string& path_to_weights) { } /** - * @brief The entry point for inference engine automatic ov::Model - * creation sample - * @file ngraph_function_creation_sample/main.cpp - * @example ngraph_function_creation_sample/main.cpp + * @brief The entry point for OpenVINO ov::Model creation sample */ int main(int argc, char* argv[]) { try { diff --git a/samples/cpp/ngraph_function_creation_sample/ngraph_function_creation_sample.hpp b/samples/cpp/model_creation_sample/model_creation_sample.hpp similarity index 100% rename from samples/cpp/ngraph_function_creation_sample/ngraph_function_creation_sample.hpp rename to samples/cpp/model_creation_sample/model_creation_sample.hpp diff --git a/samples/cpp/speech_sample/README.md b/samples/cpp/speech_sample/README.md index 74ce696a106..0f440ba767d 100644 --- a/samples/cpp/speech_sample/README.md +++ b/samples/cpp/speech_sample/README.md @@ -1,6 +1,6 @@ # Automatic Speech Recognition C++ Sample {#openvino_inference_engine_samples_speech_sample_README} -This sample demonstrates how to execute an Asynchronous Inference of acoustic model based on Kaldi\* neural networks and speech feature vectors. +This sample demonstrates how to execute an Asynchronous Inference of acoustic model based on Kaldi\* neural networks and speech feature vectors. The sample works with Kaldi ARK or Numpy* uncompressed NPZ files, so it does not cover an end-to-end speech recognition scenario (speech to text), requiring additional preprocessing (feature extraction) to get a feature vector from a speech signal, as well as postprocessing (decoding) to produce text from scores. @@ -31,7 +31,7 @@ At startup, the sample application reads command-line parameters, loads a specif If the `-r` option is given, error statistics are provided for each speech utterance as shown above. 
You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. ### GNA-specific details @@ -237,7 +237,7 @@ All of mentioned files can be downloaded from [https://storage.openvinotoolkit.o ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/cpp/speech_sample/main.cpp b/samples/cpp/speech_sample/main.cpp index dc5fad5c4b6..7ebc4adde8c 100644 --- a/samples/cpp/speech_sample/main.cpp +++ b/samples/cpp/speech_sample/main.cpp @@ -86,10 +86,11 @@ int main(int argc, char* argv[]) { uint32_t batchSize = (FLAGS_cw_r > 0 || FLAGS_cw_l > 0 || !FLAGS_bs) ? 
1 : (uint32_t)FLAGS_bs; std::shared_ptr model; std::vector outputs; + std::vector output_names; std::vector ports; // --------------------------- Processing custom outputs --------------------------------------------- if (!FLAGS_oname.empty()) { - std::vector output_names = convert_str_to_vector(FLAGS_oname); + output_names = convert_str_to_vector(FLAGS_oname); for (const auto& output_name : output_names) { auto pos_layer = output_name.rfind(":"); if (pos_layer == std::string::npos) { @@ -178,19 +179,22 @@ int main(int argc, char* argv[]) { } if (FLAGS_q.compare("user") == 0) { if (!FLAGS_rg.empty()) { - slog::warn << "Custom scale factor will be used for imported gna model: " << FLAGS_rg << slog::endl; - } - auto scale_factors_per_input = parse_scale_factors(model->inputs(), FLAGS_sf); - if (numInputFiles != scale_factors_per_input.size()) { - std::string errMessage( - "Incorrect command line for multiple inputs: " + std::to_string(scale_factors_per_input.size()) + - " scale factors provided for " + std::to_string(numInputFiles) + " input files."); + std::string errMessage("Custom scale factor can not be set for imported gna model: " + FLAGS_rg); throw std::logic_error(errMessage); + } else { + auto scale_factors_per_input = parse_scale_factors(model->inputs(), FLAGS_sf); + if (numInputFiles != scale_factors_per_input.size()) { + std::string errMessage("Incorrect command line for multiple inputs: " + + std::to_string(scale_factors_per_input.size()) + + " scale factors provided for " + std::to_string(numInputFiles) + + " input files."); + throw std::logic_error(errMessage); + } + for (auto&& sf : scale_factors_per_input) { + slog::info << "For input " << sf.first << " using scale factor of " << sf.second << slog::endl; + } + gnaPluginConfig[ov::intel_gna::scale_factors_per_input.name()] = scale_factors_per_input; } - for (auto&& sf : scale_factors_per_input) { - slog::info << "For input " << sf.first << " using scale factor of " << sf.second << slog::endl; - } - 
gnaPluginConfig[ov::intel_gna::scale_factors_per_input.name()] = scale_factors_per_input; } else { // "static" quantization with calculated scale factor if (!FLAGS_rg.empty()) { @@ -245,10 +249,9 @@ int main(int argc, char* argv[]) { auto t0 = Time::now(); ms loadTime = std::chrono::duration_cast(Time::now() - t0); slog::info << "Model loading time " << loadTime.count() << " ms" << slog::endl; - slog::info << "Loading model to the device " << FLAGS_d << slog::endl; ov::CompiledModel executableNet; if (!FLAGS_m.empty()) { - slog::info << "Loading model to the device" << slog::endl; + slog::info << "Loading model to the device " << FLAGS_d << slog::endl; executableNet = core.compile_model(model, deviceStr, genericPluginConfig); } else { slog::info << "Importing model to the device" << slog::endl; @@ -339,159 +342,189 @@ int main(int argc, char* argv[]) { } count_file = reference_name_files.empty() ? 1 : reference_name_files.size(); } + if (count_file > executableNet.outputs().size()) { + throw std::logic_error( + "The number of output/reference files is not equal to the number of network outputs."); + } // ----------------------------------------------------------------------------------------------------- // --------------------------- Step 5. 
Do inference -------------------------------------------------------- - for (size_t next_output = 0; next_output < count_file; next_output++) { - std::vector> ptrUtterances; - std::vector ptrScores; - std::vector ptrReferenceScores; - ScoreErrorT frameError, totalError; - ptrUtterances.resize(inputFiles.size()); - // initialize memory state before starting - for (auto&& state : inferRequests.begin()->inferRequest.query_state()) { - state.reset(); - } - /** Work with each utterance **/ - for (uint32_t utteranceIndex = 0; utteranceIndex < numUtterances; ++utteranceIndex) { - std::map utterancePerfMap; - uint64_t totalNumberOfRunsOnHw = 0; - std::string uttName; - uint32_t numFrames(0), n(0); - std::vector numFrameElementsInput; - uint32_t numFramesReference(0), numFrameElementsReference(0), numBytesPerElementReference(0), - numBytesReferenceScoreThisUtterance(0); - auto dims = executableNet.outputs()[0].get_shape(); - const auto numScoresPerFrame = - std::accumulate(std::begin(dims), std::end(dims), size_t{1}, std::multiplies()); - slog::info << "Number scores per frame : " << numScoresPerFrame << slog::endl; - /** Get information from input file for current utterance **/ - numFrameElementsInput.resize(numInputFiles); - for (size_t i = 0; i < inputFiles.size(); i++) { - std::vector ptrUtterance; - auto inputFilename = inputFiles[i].c_str(); - uint32_t currentNumFrames(0), currentNumFrameElementsInput(0), currentNumBytesPerElementInput(0); - file->get_file_info(inputFilename, utteranceIndex, &n, &numBytesThisUtterance[i]); - ptrUtterance.resize(numBytesThisUtterance[i]); - file->load_file(inputFilename, - utteranceIndex, - uttName, - ptrUtterance, - ¤tNumFrames, - ¤tNumFrameElementsInput, - ¤tNumBytesPerElementInput); - if (numFrames == 0) { - numFrames = currentNumFrames; - } else if (numFrames != currentNumFrames) { - std::string errMessage("Number of frames in input files is different: " + - std::to_string(numFrames) + " and " + std::to_string(currentNumFrames)); - 
throw std::logic_error(errMessage); - } - ptrUtterances[i] = ptrUtterance; - numFrameElementsInput[i] = currentNumFrameElementsInput; - } - int i = 0; - for (auto& ptrInputBlob : ptrInputBlobs) { - if (ptrInputBlob.get_size() != numFrameElementsInput[i++] * batchSize) { - throw std::logic_error("network input size(" + std::to_string(ptrInputBlob.get_size()) + - ") mismatch to input file size (" + - std::to_string(numFrameElementsInput[i - 1] * batchSize) + ")"); - } - } - ptrScores.resize(numFrames * numScoresPerFrame * sizeof(float)); - if (!FLAGS_r.empty()) { - /** Read file with reference scores **/ - BaseFile* fileReferenceScores; - auto exReferenceScoresFile = fileExt(FLAGS_r); - if (exReferenceScoresFile == "ark") { - fileReferenceScores = &arkFile; - } else if (exReferenceScoresFile == "npz") { - fileReferenceScores = &numpyFile; - } else { - throw std::logic_error("Invalid Reference Scores file"); - } - std::string refUtteranceName; - fileReferenceScores->get_file_info(reference_name_files[next_output].c_str(), - utteranceIndex, - &n, - &numBytesReferenceScoreThisUtterance); - ptrReferenceScores.resize(numBytesReferenceScoreThisUtterance); - fileReferenceScores->load_file(reference_name_files[next_output].c_str(), - utteranceIndex, - refUtteranceName, - ptrReferenceScores, - &numFramesReference, - &numFrameElementsReference, - &numBytesPerElementReference); - } - double totalTime = 0.0; - std::cout << "Utterance " << utteranceIndex << ": " << std::endl; - clear_score_error(&totalError); - totalError.threshold = frameError.threshold = MAX_SCORE_DIFFERENCE; - auto outputFrame = &ptrScores.front(); - std::vector inputFrame; - for (auto& ut : ptrUtterances) { - inputFrame.push_back(&ut.front()); - } - std::map callPerfMap; - size_t frameIndex = 0; - uint32_t numFramesFile = numFrames; - numFrames += FLAGS_cw_l + FLAGS_cw_r; - uint32_t numFramesThisBatch{batchSize}; - auto t0 = Time::now(); - auto t1 = t0; - while (frameIndex <= numFrames) { - if (frameIndex == 
numFrames) { - if (std::find_if(inferRequests.begin(), inferRequests.end(), [&](InferRequestStruct x) { - return (x.frameIndex != -1); - }) == inferRequests.end()) { - break; - } - } - bool inferRequestFetched = false; - /** Start inference loop **/ - for (auto& inferRequest : inferRequests) { - if (frameIndex == numFrames) { - numFramesThisBatch = 1; - } else { - numFramesThisBatch = - (numFrames - frameIndex < batchSize) ? (numFrames - frameIndex) : batchSize; - } + std::vector> ptrUtterances; + std::vector> vectorPtrScores((outputs.size() == 0) ? executableNet.outputs().size() + : outputs.size()); + std::vector numScoresPerOutput((outputs.size() == 0) ? executableNet.outputs().size() + : outputs.size()); + std::vector> vectorPtrReferenceScores(reference_name_files.size()); + std::vector vectorFrameError(reference_name_files.size()), + vectorTotalError(reference_name_files.size()); + ptrUtterances.resize(inputFiles.size()); + // initialize memory state before starting + for (auto&& state : inferRequests.begin()->inferRequest.query_state()) { + state.reset(); + } + /** Work with each utterance **/ + for (uint32_t utteranceIndex = 0; utteranceIndex < numUtterances; ++utteranceIndex) { + std::map utterancePerfMap; + uint64_t totalNumberOfRunsOnHw = 0; + std::string uttName; + uint32_t numFrames(0), n(0); + std::vector numFrameElementsInput; + std::vector numFramesReference(reference_name_files.size()), + numFrameElementsReference(reference_name_files.size()), + numBytesPerElementReference(reference_name_files.size()), + numBytesReferenceScoreThisUtterance(reference_name_files.size()); + + /** Get information from input file for current utterance **/ + numFrameElementsInput.resize(numInputFiles); + for (size_t i = 0; i < inputFiles.size(); i++) { + std::vector ptrUtterance; + auto inputFilename = inputFiles[i].c_str(); + uint32_t currentNumFrames(0), currentNumFrameElementsInput(0), currentNumBytesPerElementInput(0); + file->get_file_info(inputFilename, 
utteranceIndex, &n, &numBytesThisUtterance[i]); + ptrUtterance.resize(numBytesThisUtterance[i]); + file->load_file(inputFilename, + utteranceIndex, + uttName, + ptrUtterance, + ¤tNumFrames, + ¤tNumFrameElementsInput, + ¤tNumBytesPerElementInput); + if (numFrames == 0) { + numFrames = currentNumFrames; + } else if (numFrames != currentNumFrames) { + std::string errMessage("Number of frames in input files is different: " + + std::to_string(numFrames) + " and " + std::to_string(currentNumFrames)); + throw std::logic_error(errMessage); + } + ptrUtterances[i] = ptrUtterance; + numFrameElementsInput[i] = currentNumFrameElementsInput; + } + int i = 0; + for (auto& ptrInputBlob : ptrInputBlobs) { + if (ptrInputBlob.get_size() != numFrameElementsInput[i++] * batchSize) { + throw std::logic_error("network input size(" + std::to_string(ptrInputBlob.get_size()) + + ") mismatch to input file size (" + + std::to_string(numFrameElementsInput[i - 1] * batchSize) + ")"); + } + } + + double totalTime = 0.0; + + for (size_t errorIndex = 0; errorIndex < vectorFrameError.size(); errorIndex++) { + clear_score_error(&vectorTotalError[errorIndex]); + vectorTotalError[errorIndex].threshold = vectorFrameError[errorIndex].threshold = MAX_SCORE_DIFFERENCE; + } + + std::vector inputFrame; + for (auto& ut : ptrUtterances) { + inputFrame.push_back(&ut.front()); + } + std::map callPerfMap; + size_t frameIndex = 0; + uint32_t numFramesFile = numFrames; + numFrames += FLAGS_cw_l + FLAGS_cw_r; + uint32_t numFramesThisBatch{batchSize}; + auto t0 = Time::now(); + auto t1 = t0; + + BaseFile* fileReferenceScores; + std::string refUtteranceName; + + if (!FLAGS_r.empty()) { + /** Read file with reference scores **/ + auto exReferenceScoresFile = fileExt(FLAGS_r); + if (exReferenceScoresFile == "ark") { + fileReferenceScores = &arkFile; + } else if (exReferenceScoresFile == "npz") { + fileReferenceScores = &numpyFile; + } else { + throw std::logic_error("Invalid Reference Scores file"); + } + for (size_t 
next_output = 0; next_output < count_file; next_output++) { + if (fileReferenceScores != nullptr) { + fileReferenceScores->get_file_info(reference_name_files[next_output].c_str(), + utteranceIndex, + &n, + &numBytesReferenceScoreThisUtterance[next_output]); + vectorPtrReferenceScores[next_output].resize(numBytesReferenceScoreThisUtterance[next_output]); + fileReferenceScores->load_file(reference_name_files[next_output].c_str(), + utteranceIndex, + refUtteranceName, + vectorPtrReferenceScores[next_output], + &numFramesReference[next_output], + &numFrameElementsReference[next_output], + &numBytesPerElementReference[next_output]); + } + } + } + + while (frameIndex <= numFrames) { + if (frameIndex == numFrames) { + if (std::find_if(inferRequests.begin(), inferRequests.end(), [&](InferRequestStruct x) { + return (x.frameIndex != -1); + }) == inferRequests.end()) { + break; + } + } + bool inferRequestFetched = false; + /** Start inference loop **/ + for (auto& inferRequest : inferRequests) { + if (frameIndex == numFrames) { + numFramesThisBatch = 1; + } else { + numFramesThisBatch = + (numFrames - frameIndex < batchSize) ? (numFrames - frameIndex) : batchSize; + } + + /* waits until inference result becomes available */ + if (inferRequest.frameIndex != -1) { + inferRequest.inferRequest.wait(); + if (inferRequest.frameIndex >= 0) + for (size_t next_output = 0; next_output < count_file; next_output++) { + std::string outputName = (outputs.size() == 0) + ? 
executableNet.output(next_output).get_any_name() + : output_names[next_output]; + auto dims = executableNet.output(outputName).get_shape(); + numScoresPerOutput[next_output] = std::accumulate(std::begin(dims), + std::end(dims), + size_t{1}, + std::multiplies()); + + vectorPtrScores[next_output].resize(numFramesFile * numScoresPerOutput[next_output] * + sizeof(float)); - /* waits until inference result becomes available */ - if (inferRequest.frameIndex != -1) { - inferRequest.inferRequest.wait(); - if (inferRequest.frameIndex >= 0) { if (!FLAGS_o.empty()) { /* Prepare output data for save to file in future */ - outputFrame = &ptrScores.front() + - numScoresPerFrame * sizeof(float) * (inferRequest.frameIndex); + auto outputFrame = &vectorPtrScores[next_output].front() + + numScoresPerOutput[next_output] * sizeof(float) * + (inferRequest.frameIndex) / batchSize; ov::Tensor outputBlob = - inferRequest.inferRequest.get_tensor(executableNet.outputs()[0]); - if (!outputs.empty()) { - outputBlob = - inferRequest.inferRequest.get_tensor(executableNet.output(FLAGS_oname)); - } - // locked memory holder should be alive all time while access to its buffer - // happens - auto byteSize = numScoresPerFrame * sizeof(float); + inferRequest.inferRequest.get_tensor(executableNet.output(outputName)); + // locked memory holder should be alive all time while access to its buffer happens + auto byteSize = numScoresPerOutput[next_output] * sizeof(float); std::memcpy(outputFrame, outputBlob.data(), byteSize); } if (!FLAGS_r.empty()) { /** Compare output data with reference scores **/ ov::Tensor outputBlob = - inferRequest.inferRequest.get_tensor(executableNet.outputs()[0]); - if (!FLAGS_oname.empty()) - outputBlob = - inferRequest.inferRequest.get_tensor(executableNet.output(FLAGS_oname)); - compare_scores( - outputBlob.data(), - &ptrReferenceScores[inferRequest.frameIndex * numFrameElementsReference * - numBytesPerElementReference], - &frameError, - inferRequest.numFramesThisBatch, - 
numFrameElementsReference); - update_score_error(&frameError, &totalError); + inferRequest.inferRequest.get_tensor(executableNet.output(outputName)); + + if (numScoresPerOutput[next_output] / numFrameElementsReference[next_output] == + batchSize) { + compare_scores( + outputBlob.data(), + &vectorPtrReferenceScores[next_output] + [inferRequest.frameIndex * + numFrameElementsReference[next_output] * + numBytesPerElementReference[next_output]], + &vectorFrameError[next_output], + inferRequest.numFramesThisBatch, + numFrameElementsReference[next_output]); + update_score_error(&vectorFrameError[next_output], + &vectorTotalError[next_output]); + } else { + throw std::logic_error("Number of output and reference frames does not match."); + } } if (FLAGS_pc) { // retrieve new counters @@ -500,61 +533,110 @@ int main(int argc, char* argv[]) { sum_performance_counters(callPerfMap, utterancePerfMap, totalNumberOfRunsOnHw); } } - // ----------------------------------------------------------------------------------------------------- - } - if (frameIndex == numFrames) { - inferRequest.frameIndex = -1; - continue; - } // ----------------------------------------------------------------------------------------------------- - int index = static_cast(frameIndex) - (FLAGS_cw_l + FLAGS_cw_r); - for (int i = 0; i < executableNet.inputs().size(); i++) { - inferRequest.inferRequest.set_input_tensor( - i, - ov::Tensor(ov::element::f32, executableNet.inputs()[i].get_shape(), inputFrame[i])); - } - /* Starting inference in asynchronous mode*/ - inferRequest.inferRequest.start_async(); - inferRequest.frameIndex = index < 0 ? 
-2 : index; - inferRequest.numFramesThisBatch = numFramesThisBatch; - frameIndex += numFramesThisBatch; - for (size_t j = 0; j < inputFiles.size(); j++) { - if (FLAGS_cw_l > 0 || FLAGS_cw_r > 0) { - int idx = frameIndex - FLAGS_cw_l; - if (idx > 0 && idx < static_cast(numFramesFile)) { - inputFrame[j] += sizeof(float) * numFrameElementsInput[j] * numFramesThisBatch; - } else if (idx >= static_cast(numFramesFile)) { - inputFrame[j] = &ptrUtterances[j].front() + (numFramesFile - 1) * sizeof(float) * - numFrameElementsInput[j] * - numFramesThisBatch; - } else if (idx <= 0) { - inputFrame[j] = &ptrUtterances[j].front(); - } - } else { - inputFrame[j] += sizeof(float) * numFrameElementsInput[j] * numFramesThisBatch; - } - } - inferRequestFetched |= true; } - /** Inference was finished for current frame **/ - if (!inferRequestFetched) { - std::this_thread::sleep_for(std::chrono::milliseconds(1)); + if (frameIndex == numFrames) { + inferRequest.frameIndex = -1; continue; } - } - t1 = Time::now(); - fsec fs = t1 - t0; - ms d = std::chrono::duration_cast(fs); - totalTime += d.count(); - // resetting state between utterances - for (auto&& state : inferRequests.begin()->inferRequest.query_state()) { - state.reset(); - } - // ----------------------------------------------------------------------------------------------------- + ptrInputBlobs.clear(); + if (FLAGS_iname.empty()) { + for (auto& input : cInputInfo) { + ptrInputBlobs.push_back(inferRequest.inferRequest.get_tensor(input)); + } + } else { + std::vector inputNameBlobs = convert_str_to_vector(FLAGS_iname); + for (const auto& input : inputNameBlobs) { + ov::Tensor blob = inferRequests.begin()->inferRequest.get_tensor(input); + if (!blob) { + std::string errMessage("No blob with name : " + input); + throw std::logic_error(errMessage); + } + ptrInputBlobs.push_back(blob); + } + } - // --------------------------- Step 6. 
Process output - // ------------------------------------------------------- + /** Iterate over all the input blobs **/ + for (size_t i = 0; i < numInputFiles; ++i) { + ov::Tensor minput = ptrInputBlobs[i]; + if (!minput) { + std::string errMessage("We expect ptrInputBlobs[" + std::to_string(i) + + "] to be inherited from Tensor, " + + "but in fact we were not able to cast input to Tensor"); + throw std::logic_error(errMessage); + } + memcpy(minput.data(), + inputFrame[i], + numFramesThisBatch * numFrameElementsInput[i] * sizeof(float)); + // Used to infer fewer frames than the batch size + if (batchSize != numFramesThisBatch) { + memset(minput.data() + numFramesThisBatch * numFrameElementsInput[i], + 0, + (batchSize - numFramesThisBatch) * numFrameElementsInput[i]); + } + } + // ----------------------------------------------------------------------------------------------------- + int index = static_cast(frameIndex) - (FLAGS_cw_l + FLAGS_cw_r); + /* Starting inference in asynchronous mode*/ + inferRequest.inferRequest.start_async(); + inferRequest.frameIndex = index < 0 ? 
-2 : index; + inferRequest.numFramesThisBatch = numFramesThisBatch; + frameIndex += numFramesThisBatch; + for (size_t j = 0; j < inputFiles.size(); j++) { + if (FLAGS_cw_l > 0 || FLAGS_cw_r > 0) { + int idx = frameIndex - FLAGS_cw_l; + if (idx > 0 && idx < static_cast(numFramesFile)) { + inputFrame[j] += sizeof(float) * numFrameElementsInput[j] * numFramesThisBatch; + } else if (idx >= static_cast(numFramesFile)) { + inputFrame[j] = &ptrUtterances[j].front() + (numFramesFile - 1) * sizeof(float) * + numFrameElementsInput[j] * + numFramesThisBatch; + } else if (idx <= 0) { + inputFrame[j] = &ptrUtterances[j].front(); + } + } else { + inputFrame[j] += sizeof(float) * numFrameElementsInput[j] * numFramesThisBatch; + } + } + inferRequestFetched |= true; + } + /** Inference was finished for current frame **/ + if (!inferRequestFetched) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + continue; + } + } + t1 = Time::now(); + fsec fs = t1 - t0; + ms d = std::chrono::duration_cast(fs); + totalTime += d.count(); + // resetting state between utterances + for (auto&& state : inferRequests.begin()->inferRequest.query_state()) { + state.reset(); + } + // ----------------------------------------------------------------------------------------------------- + // --------------------------- Step 6. 
Process output + // ------------------------------------------------------- + + /** Show performance results **/ + std::cout << "Utterance " << utteranceIndex << ": " << std::endl; + std::cout << "Total time in Infer (HW and SW):\t" << totalTime << " ms" << std::endl; + std::cout << "Frames in utterance:\t\t\t" << numFrames << " frames" << std::endl; + std::cout << "Average Infer time per frame:\t\t" << totalTime / static_cast(numFrames) << " ms\n" + << std::endl; + + if (FLAGS_pc) { + // print performance results + print_performance_counters(utterancePerfMap, + frameIndex, + std::cout, + getFullDeviceName(core, FLAGS_d), + totalNumberOfRunsOnHw, + FLAGS_d); + } + + for (size_t next_output = 0; next_output < count_file; next_output++) { if (!FLAGS_o.empty()) { auto exOutputScoresFile = fileExt(FLAGS_o); if (exOutputScoresFile == "ark") { @@ -569,33 +651,21 @@ int main(int argc, char* argv[]) { fileOutput->save_file(output_name_files[next_output].c_str(), shouldAppend, uttName, - &ptrScores.front(), + &vectorPtrScores[next_output].front(), numFramesFile, - numScoresPerFrame); - } - /** Show performance results **/ - std::cout << "Total time in Infer (HW and SW):\t" << totalTime << " ms" << std::endl; - std::cout << "Frames in utterance:\t\t\t" << numFrames << " frames" << std::endl; - std::cout << "Average Infer time per frame:\t\t" << totalTime / static_cast(numFrames) << " ms" - << std::endl; - if (FLAGS_pc) { - // print performance results - print_performance_counters(utterancePerfMap, - frameIndex, - std::cout, - getFullDeviceName(core, FLAGS_d), - totalNumberOfRunsOnHw, - FLAGS_d); + numScoresPerOutput[next_output] / batchSize); } if (!FLAGS_r.empty()) { // print statistical score error - print_reference_compare_results(totalError, numFrames, std::cout); + std::string outputName = (outputs.size() == 0) ? 
executableNet.output(next_output).get_any_name() + : output_names[next_output]; + std::cout << "Output name: " << outputName << std::endl; + std::cout << "Number scores per frame: " << numScoresPerOutput[next_output] / batchSize << std::endl + << std::endl; + print_reference_compare_results(vectorTotalError[next_output], numFrames, std::cout); } - std::cout << "End of Utterance " << utteranceIndex << std::endl << std::endl; - // ----------------------------------------------------------------------------------------------------- } } - // ----------------------------------------------------------------------------------------------------- } catch (const std::exception& error) { slog::err << error.what() << slog::endl; return 1; diff --git a/samples/python/classification_sample_async/README.md b/samples/python/classification_sample_async/README.md index 98c38a47166..24f4095001a 100644 --- a/samples/python/classification_sample_async/README.md +++ b/samples/python/classification_sample_async/README.md @@ -23,7 +23,7 @@ Basic OpenVINO™ Runtime API is covered by [Hello Classification Python* Sample At startup, the sample application reads command-line parameters, prepares input data, loads a specified model and image(s) to the OpenVINO™ Runtime plugin, performs synchronous inference, and processes output data, logging each step in a standard output stream. You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. 
## Running @@ -135,7 +135,7 @@ The sample application logs each step in a standard output stream and outputs to ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/python/hello_classification/README.md b/samples/python/hello_classification/README.md index 71f15ba8315..a5dd902fc1b 100644 --- a/samples/python/hello_classification/README.md +++ b/samples/python/hello_classification/README.md @@ -24,7 +24,7 @@ The following Python API is used in the application: At startup, the sample application reads command-line parameters, prepares input data, loads a specified model and image to the OpenVINO™ Runtime plugin, performs synchronous inference, and processes output data, logging each step in a standard output stream. You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. 
## Running @@ -98,7 +98,7 @@ The sample application logs each step in a standard output stream and outputs to ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/python/hello_classification/hello_classification.py b/samples/python/hello_classification/hello_classification.py index 9af5a7e0b72..9b8355b0607 100755 --- a/samples/python/hello_classification/hello_classification.py +++ b/samples/python/hello_classification/hello_classification.py @@ -54,13 +54,11 @@ def main(): # 1) Set input tensor information: # - input() provides information about a single model input - # - precision of tensor is supposed to be 'u8' + # - reuse precision and shape from already available `input_tensor` # - layout of data is 'NHWC' - # - set static spatial dimensions to input tensor to resize from ppp.input().tensor() \ - .set_element_type(Type.u8) \ - .set_layout(Layout('NHWC')) \ - .set_spatial_static_shape(h, w) # noqa: ECE001, N400 + .set_from(input_tensor) \ + .set_layout(Layout('NHWC')) # noqa: ECE001, N400 # 2) Adding explicit preprocessing steps: # - apply linear resize from tensor spatial dims to model spatial dims @@ -73,7 +71,7 @@ def main(): # - precision of tensor is supposed to be 'f32' ppp.output().tensor().set_element_type(Type.f32) - # 5) Apply preprocessing modifing the original 'model' + # 5) Apply preprocessing modifying the original 'model' model = ppp.build() # --------------------------- Step 5. 
Loading model to the device ----------------------------------------------------- diff --git a/samples/python/hello_query_device/README.md b/samples/python/hello_query_device/README.md index a37852be755..aa934529df0 100644 --- a/samples/python/hello_query_device/README.md +++ b/samples/python/hello_query_device/README.md @@ -1,6 +1,6 @@ # Hello Query Device Python* Sample {#openvino_inference_engine_ie_bridges_python_sample_hello_query_device_README} -This sample demonstrates how to show OpenVINO™ Runtime devices and prints their metrics and default configuration values using [Query Device API feature](../../../docs/OV_Runtime_UG/InferenceEngine_QueryAPI.md). +This sample demonstrates how to show OpenVINO™ Runtime devices and prints their metrics and default configuration values using [Query Device API feature](../../../docs/OV_Runtime_UG/supported_plugins/config_properties.md). The following Python API is used in the application: @@ -28,7 +28,7 @@ python hello_query_device.py ## Sample Output -The application prints all available devices with their supported metrics and default values for configuration parameters. +The application prints all available devices with their supported metrics and default values for configuration parameters. For example: ``` diff --git a/samples/python/hello_reshape_ssd/README.md b/samples/python/hello_reshape_ssd/README.md index ef16766af1f..f7bc8e6aba7 100644 --- a/samples/python/hello_reshape_ssd/README.md +++ b/samples/python/hello_reshape_ssd/README.md @@ -24,7 +24,7 @@ At startup, the sample application reads command-line parameters, prepares input As a result, the program creates an output image, logging each step in a standard output stream. You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. 
+each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. ## Running @@ -86,7 +86,7 @@ The sample application logs each step in a standard output stream and creates an ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/python/ngraph_function_creation_sample/README.md b/samples/python/model_creation_sample/README.md similarity index 88% rename from samples/python/ngraph_function_creation_sample/README.md rename to samples/python/model_creation_sample/README.md index 22979415726..068cb25894c 100644 --- a/samples/python/ngraph_function_creation_sample/README.md +++ b/samples/python/model_creation_sample/README.md @@ -1,13 +1,13 @@ -# nGraph Function Creation Python* Sample {#openvino_inference_engine_ie_bridges_python_sample_ngraph_function_creation_sample_README} +# Model Creation Python* Sample {#openvino_inference_engine_ie_bridges_python_sample_model_creation_sample_README} -This sample demonstrates how to run inference using a [model](../../../docs/OV_Runtime_UG/model_representation.md) built on the fly that uses weights from the LeNet classification model, which is known to work well on digit classification tasks. You do not need an XML file, the model is created from the source code on the fly. 
+This sample demonstrates how to run inference using a [model](../../../docs/OV_Runtime_UG/model_representation.md) built on the fly that uses weights from the LeNet classification model, which is known to work well on digit classification tasks. You do not need an XML file, the model is created from the source code on the fly. -The following Python API is used in the application: +The following OpenVINO Python API is used in the application: | Feature | API | Description | | :--------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------------------------------------------------ | | Model Operations | [openvino.runtime.Model], [openvino.runtime.set_batch], [openvino.runtime.Model.input] | Managing of model | -| nGraph Functions | [openvino.runtime.op.Parameter], [openvino.runtime.op.Constant], [openvino.runtime.opset8.convolution], [openvino.runtime.opset8.add], [openvino.runtime.opset1.max_pool], [openvino.runtime.opset8.reshape], [openvino.runtime.opset8.matmul], [openvino.runtime.opset8.relu], [openvino.runtime.opset8.softmax] | Description of a model topology using nGraph Python API | +| Opset operations | [openvino.runtime.op.Parameter], [openvino.runtime.op.Constant], [openvino.runtime.opset8.convolution], [openvino.runtime.opset8.add], [openvino.runtime.opset1.max_pool], [openvino.runtime.opset8.reshape], [openvino.runtime.opset8.matmul], [openvino.runtime.opset8.relu], [openvino.runtime.opset8.softmax] | Description of a model topology using OpenVINO Python API | Basic OpenVINO™ Runtime API is covered by [Hello Classification Python* Sample](../hello_classification/README.md). 
@@ -16,7 +16,7 @@ Basic OpenVINO™ Runtime API is covered by [Hello Classification Python* Sample | Validated Models | LeNet | | Model Format | Model weights file (\*.bin) | | Supported devices | [All](../../../docs/OV_Runtime_UG/supported_plugins/Supported_Devices.md) | -| Other language realization | [C++](../../../samples/cpp/ngraph_function_creation_sample/README.md) | +| Other language realization | [C++](../../../samples/cpp/model_creation_sample/README.md) | ## How It Works @@ -28,14 +28,14 @@ At startup, the sample application does the following: You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. ## Running To run the sample, you need to specify model weights and device. 
``` -python ngraph_function_creation_sample.py +python model_creation_sample.py ``` > **NOTE**: @@ -49,7 +49,7 @@ python ngraph_function_creation_sample.py For example: ``` -python ngraph_function_creation_sample.py lenet.bin GPU +python model_creation_sample.py lenet.bin GPU ``` ## Sample Output @@ -127,7 +127,7 @@ The sample application logs each step in a standard output stream and outputs 10 ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/python/ngraph_function_creation_sample/data.py b/samples/python/model_creation_sample/data.py similarity index 100% rename from samples/python/ngraph_function_creation_sample/data.py rename to samples/python/model_creation_sample/data.py diff --git a/samples/python/ngraph_function_creation_sample/lenet.bin b/samples/python/model_creation_sample/lenet.bin similarity index 100% rename from samples/python/ngraph_function_creation_sample/lenet.bin rename to samples/python/model_creation_sample/lenet.bin diff --git a/samples/python/ngraph_function_creation_sample/ngraph_function_creation_sample.py b/samples/python/model_creation_sample/model_creation_sample.py similarity index 100% rename from samples/python/ngraph_function_creation_sample/ngraph_function_creation_sample.py rename to samples/python/model_creation_sample/model_creation_sample.py diff --git a/samples/python/speech_sample/README.md b/samples/python/speech_sample/README.md index 1f460379557..48752c6a575 100644 --- a/samples/python/speech_sample/README.md +++ b/samples/python/speech_sample/README.md @@ -29,7 +29,7 @@ 
Basic OpenVINO™ Runtime API is covered by [Hello Classification Python* Sample At startup, the sample application reads command-line parameters, loads a specified model and input data to the OpenVINO™ Runtime plugin, performs synchronous inference on all speech utterances stored in the input file, logging each step in a standard output stream. You can see the explicit description of -each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) section of "Integrate the OpenVINO™ Runtime with Your Application" guide. +each sample step at [Integration Steps](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) section of "Integrate OpenVINO™ Runtime with Your Application" guide. ## GNA-specific details @@ -87,11 +87,11 @@ python speech_sample.py -h Usage message: ``` -usage: speech_sample.py [-h] (-m MODEL | -rg IMPORT_GNA_MODEL) -i INPUT [-o OUTPUT] [-r REFERENCE] [-d DEVICE] [-bs [1-8]] - [-qb [8, 16]] [-sf SCALE_FACTOR] [-wg EXPORT_GNA_MODEL] [-we EXPORT_EMBEDDED_GNA_MODEL] - [-we_gen [GNA1, GNA3]] [--exec_target [GNA_TARGET_2_0, GNA_TARGET_3_0]] [-pc] [-a [CORE, ATOM]] - [-iname INPUT_LAYERS] [-oname OUTPUT_LAYERS] [-cw_l CONTEXT_WINDOW_LEFT] [-cw_r CONTEXT_WINDOW_RIGHT] - [-pwl_me PWL_ME] +usage: speech_sample.py [-h] (-m MODEL | -rg IMPORT_GNA_MODEL) -i INPUT [-o OUTPUT] [-r REFERENCE] [-d DEVICE] [-bs [1-8]] + [-layout LAYOUT] [-qb [8, 16]] [-sf SCALE_FACTOR] [-wg EXPORT_GNA_MODEL] + [-we EXPORT_EMBEDDED_GNA_MODEL] [-we_gen [GNA1, GNA3]] + [--exec_target [GNA_TARGET_2_0, GNA_TARGET_3_0]] [-pc] [-a [CORE, ATOM]] [-iname INPUT_LAYERS] + [-oname OUTPUT_LAYERS] [-cw_l CONTEXT_WINDOW_LEFT] [-cw_r CONTEXT_WINDOW_RIGHT] [-pwl_me PWL_ME] optional arguments: -m MODEL, --model MODEL @@ -108,44 +108,46 @@ Options: -r REFERENCE, --reference REFERENCE Optional. Read reference score file and compare scores. -d DEVICE, --device DEVICE - Optional. Specify a target device to infer on. 
CPU, GPU, MYRIAD, GNA_AUTO, GNA_HW, GNA_SW_FP32, - GNA_SW_EXACT and HETERO with combination of GNA as the primary device and CPU as a secondary (e.g. - HETERO:GNA,CPU) are supported. The sample will look for a suitable plugin for device specified. Default - value is CPU. + Optional. Specify a target device to infer on. CPU, GPU, MYRIAD, GNA_AUTO, GNA_HW, GNA_SW_FP32, + GNA_SW_EXACT and HETERO with combination of GNA as the primary device and CPU as a secondary (e.g. + HETERO:GNA,CPU) are supported. The sample will look for a suitable plugin for device specified. + Default value is CPU. -bs [1-8], --batch_size [1-8] - Optional. Batch size 1-8 (default 1). + Optional. Batch size 1-8. + -layout LAYOUT Optional. Custom layout in format: "input0[value0],input1[value1]" or "[value]" (applied to all + inputs) -qb [8, 16], --quantization_bits [8, 16] Optional. Weight bits for quantization: 8 or 16 (default 16). -sf SCALE_FACTOR, --scale_factor SCALE_FACTOR - Optional. The user-specified input scale factor for quantization. If the model contains multiple + Optional. The user-specified input scale factor for quantization. If the model contains multiple inputs, provide scale factors by separating them with commas. -wg EXPORT_GNA_MODEL, --export_gna_model EXPORT_GNA_MODEL Optional. Write GNA model to file using path/filename provided. -we EXPORT_EMBEDDED_GNA_MODEL, --export_embedded_gna_model EXPORT_EMBEDDED_GNA_MODEL Optional. Write GNA embedded model to file using path/filename provided. -we_gen [GNA1, GNA3], --embedded_gna_configuration [GNA1, GNA3] - Optional. GNA generation configuration string for embedded export. Can be GNA1 (default) or GNA3. + Optional. GNA generation configuration string for embedded export. Can be GNA1 (default) or GNA3. --exec_target [GNA_TARGET_2_0, GNA_TARGET_3_0] - Optional. Specify GNA execution target generation. By default, generation corresponds to the GNA HW - available in the system or the latest fully supported generation by the software. 
See the GNA Plugin's - GNA_EXEC_TARGET config option description. + Optional. Specify GNA execution target generation. By default, generation corresponds to the GNA HW + available in the system or the latest fully supported generation by the software. See the GNA + Plugin's GNA_EXEC_TARGET config option description. -pc, --performance_counter Optional. Enables performance report (specify -a to ensure arch accurate results). -a [CORE, ATOM], --arch [CORE, ATOM] Optional. Specify architecture. CORE, ATOM with the combination of -pc. -iname INPUT_LAYERS, --input_layers INPUT_LAYERS - Optional. Layer names for input blobs. The names are separated with ",". Allows to change the order of - input layers for -i flag. Example: Input1,Input2 + Optional. Layer names for input blobs. The names are separated with ",". Allows to change the order + of input layers for -i flag. Example: Input1,Input2 -oname OUTPUT_LAYERS, --output_layers OUTPUT_LAYERS - Optional. Layer names for output blobs. The names are separated with ",". Allows to change the order of - output layers for -o flag. Example: Output1:port,Output2:port. + Optional. Layer names for output blobs. The names are separated with ",". Allows to change the + order of output layers for -o flag. Example: Output1:port,Output2:port. -cw_l CONTEXT_WINDOW_LEFT, --context_window_left CONTEXT_WINDOW_LEFT - Optional. Number of frames for left context windows (default is 0). Works only with context window + Optional. Number of frames for left context windows (default is 0). Works only with context window models. If you use the cw_l or cw_r flag, then batch size argument is ignored. -cw_r CONTEXT_WINDOW_RIGHT, --context_window_right CONTEXT_WINDOW_RIGHT - Optional. Number of frames for right context windows (default is 0). Works only with context window + Optional. Number of frames for right context windows (default is 0). Works only with context window models. If you use the cw_l or cw_r flag, then batch size argument is ignored. 
- -pwl_me PWL_ME Optional. The maximum percent of error for PWL function. The value must be in <0, 100> range. The + -pwl_me PWL_ME Optional. The maximum percent of error for PWL function. The value must be in <0, 100> range. The default value is 1.0. ``` @@ -328,7 +330,7 @@ The sample application logs each step in a standard output stream. ## See Also -- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/Integrate_with_customer_application_new_API.md) +- [Integrate the OpenVINO™ Runtime with Your Application](../../../docs/OV_Runtime_UG/integrate_with_your_application.md) - [Using OpenVINO™ Toolkit Samples](../../../docs/OV_Runtime_UG/Samples_Overview.md) - [Model Downloader](@ref omz_tools_downloader) - [Model Optimizer](../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md) diff --git a/samples/python/speech_sample/utils.py b/samples/python/speech_sample/utils.py index ded1dd52a2c..e3a87635958 100644 --- a/samples/python/speech_sample/utils.py +++ b/samples/python/speech_sample/utils.py @@ -82,7 +82,8 @@ def parse_outputs_from_args(args: argparse.Namespace) -> Tuple[List[str], List[i def parse_input_layouts(args: argparse.Namespace, inputs: List[Output]) -> Dict[str, str]: - if ',' in args.layout: - return dict([_input.split('[') for _input in args.layout[:-1].split('],')]) - else: + if args.layout[0] == '[': return {_input.get_any_name(): args.layout[1:-1] for _input in inputs} + else: + sep = '],' if ',' in args.layout else ']' + return dict([_input.split('[') for _input in args.layout[:-1].split(sep)]) diff --git a/scripts/setupvars/setupvars.sh b/scripts/setupvars/setupvars.sh index fdfcc0f34f3..ab186881dc6 100755 --- a/scripts/setupvars/setupvars.sh +++ b/scripts/setupvars/setupvars.sh @@ -34,8 +34,8 @@ if [ -e "$INSTALLDIR/runtime" ]; then export HDDL_INSTALL_DIR=$INSTALLDIR/runtime/3rdparty/hddl if [[ "$OSTYPE" == "darwin"* ]]; then - export 
DYLD_LIBRARY_PATH=${IE_PLUGINS_PATH}${DYLD_LIBRARY_PATH:+:DYLD_LIBRARY_PATH} - export LD_LIBRARY_PATH=${IE_PLUGINS_PATH}${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} + export DYLD_LIBRARY_PATH=${IE_PLUGINS_PATH}/Release:${IE_PLUGINS_PATH}/Debug${DYLD_LIBRARY_PATH:+:$DYLD_LIBRARY_PATH} + export LD_LIBRARY_PATH=${IE_PLUGINS_PATH}/Release:${IE_PLUGINS_PATH}/Debug${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} else export LD_LIBRARY_PATH=$HDDL_INSTALL_DIR/lib:${IE_PLUGINS_PATH}${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} fi @@ -49,7 +49,7 @@ fi if [ -e "$INSTALLDIR/runtime/3rdparty/tbb" ]; then if [[ "$OSTYPE" == "darwin"* ]]; then - export DYLD_LIBRARY_PATH=$INSTALLDIR/runtime/3rdparty/tbb/lib:${DYLD_LIBRARY_PATH:+:DYLD_LIBRARY_PATH} + export DYLD_LIBRARY_PATH=$INSTALLDIR/runtime/3rdparty/tbb/lib:${DYLD_LIBRARY_PATH:+:$DYLD_LIBRARY_PATH} fi export LD_LIBRARY_PATH=$INSTALLDIR/runtime/3rdparty/tbb/lib:${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH} export TBB_DIR=$INSTALLDIR/runtime/3rdparty/tbb/cmake diff --git a/src/bindings/c/docs/api_overview.md b/src/bindings/c/docs/api_overview.md index a1b9129ef73..224af6ee963 100644 --- a/src/bindings/c/docs/api_overview.md +++ b/src/bindings/c/docs/api_overview.md @@ -352,7 +352,7 @@ This strcut represents an Inference Engine entity and allows you to manipulate w - Parameters: - `core` -A pointer to `ie_core_t` instance. - - `device_name` - Name of the the registered plugin. + - `device_name` - Name of the registered plugin. - `version_result` - Dictionary mapping a plugin name . - Return value: Status of the operation: OK(0) for success. 
diff --git a/src/bindings/python/src/compatibility/ngraph/opset5/__init__.py b/src/bindings/python/src/compatibility/ngraph/opset5/__init__.py index 4605a5faa65..5a60a9ff2aa 100644 --- a/src/bindings/python/src/compatibility/ngraph/opset5/__init__.py +++ b/src/bindings/python/src/compatibility/ngraph/opset5/__init__.py @@ -74,7 +74,7 @@ from ngraph.opset5.ops import log_softmax from ngraph.opset5.ops import loop from ngraph.opset1.ops import lrn from ngraph.opset4.ops import lstm_cell -from ngraph.opset1.ops import lstm_sequence +from ngraph.opset5.ops import lstm_sequence from ngraph.opset1.ops import matmul from ngraph.opset1.ops import max_pool from ngraph.opset1.ops import maximum diff --git a/src/bindings/python/src/compatibility/ngraph/opset6/__init__.py b/src/bindings/python/src/compatibility/ngraph/opset6/__init__.py index f1e175aa6f6..6a15cdfcaa3 100644 --- a/src/bindings/python/src/compatibility/ngraph/opset6/__init__.py +++ b/src/bindings/python/src/compatibility/ngraph/opset6/__init__.py @@ -76,7 +76,7 @@ from ngraph.opset5.ops import log_softmax from ngraph.opset5.ops import loop from ngraph.opset1.ops import lrn from ngraph.opset4.ops import lstm_cell -from ngraph.opset1.ops import lstm_sequence +from ngraph.opset5.ops import lstm_sequence from ngraph.opset1.ops import matmul from ngraph.opset1.ops import max_pool from ngraph.opset1.ops import maximum diff --git a/src/bindings/python/src/compatibility/ngraph/opset7/__init__.py b/src/bindings/python/src/compatibility/ngraph/opset7/__init__.py index 13a39ef7815..ca7be65d95b 100644 --- a/src/bindings/python/src/compatibility/ngraph/opset7/__init__.py +++ b/src/bindings/python/src/compatibility/ngraph/opset7/__init__.py @@ -79,7 +79,7 @@ from ngraph.opset5.ops import log_softmax from ngraph.opset5.ops import loop from ngraph.opset1.ops import lrn from ngraph.opset4.ops import lstm_cell -from ngraph.opset1.ops import lstm_sequence +from ngraph.opset5.ops import lstm_sequence from ngraph.opset1.ops import 
matmul from ngraph.opset1.ops import max_pool from ngraph.opset1.ops import maximum diff --git a/src/bindings/python/src/compatibility/ngraph/opset8/__init__.py b/src/bindings/python/src/compatibility/ngraph/opset8/__init__.py index fc94b1243b4..d6e225e665f 100644 --- a/src/bindings/python/src/compatibility/ngraph/opset8/__init__.py +++ b/src/bindings/python/src/compatibility/ngraph/opset8/__init__.py @@ -84,7 +84,7 @@ from ngraph.opset5.ops import log_softmax from ngraph.opset5.ops import loop from ngraph.opset1.ops import lrn from ngraph.opset4.ops import lstm_cell -from ngraph.opset1.ops import lstm_sequence +from ngraph.opset5.ops import lstm_sequence from ngraph.opset1.ops import matmul from ngraph.opset8.ops import matrix_nms from ngraph.opset8.ops import max_pool diff --git a/src/bindings/python/src/openvino/runtime/__init__.py b/src/bindings/python/src/openvino/runtime/__init__.py index b14c460d0b6..2628fb17b2a 100644 --- a/src/bindings/python/src/openvino/runtime/__init__.py +++ b/src/bindings/python/src/openvino/runtime/__init__.py @@ -31,7 +31,6 @@ from openvino.pyopenvino import AxisVector from openvino.pyopenvino import Coordinate from openvino.pyopenvino import Layout from openvino.pyopenvino import ConstOutput -from openvino.pyopenvino import util from openvino.pyopenvino import layout_helpers from openvino.pyopenvino import RTMap diff --git a/src/bindings/python/src/openvino/runtime/exceptions.py b/src/bindings/python/src/openvino/runtime/exceptions.py index 622c9d1d6d8..8a6f8760310 100644 --- a/src/bindings/python/src/openvino/runtime/exceptions.py +++ b/src/bindings/python/src/openvino/runtime/exceptions.py @@ -1,16 +1,16 @@ # Copyright (C) 2018-2022 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -"""openvino exceptions hierarchy. All exceptions are descendants of NgraphError.""" +"""openvino exceptions hierarchy. 
All exceptions are descendants of OVError.""" -class NgraphError(Exception): - """Base class for Ngraph exceptions.""" +class OVError(Exception): + """Base class for OV exceptions.""" -class UserInputError(NgraphError): +class UserInputError(OVError): """User provided unexpected input.""" -class NgraphTypeError(NgraphError, TypeError): +class OVTypeError(OVError, TypeError): """Type mismatch error.""" diff --git a/src/bindings/python/src/openvino/runtime/ie_api.py b/src/bindings/python/src/openvino/runtime/ie_api.py index 67bb84ef68a..b43d335463d 100644 --- a/src/bindings/python/src/openvino/runtime/ie_api.py +++ b/src/bindings/python/src/openvino/runtime/ie_api.py @@ -132,7 +132,7 @@ class InferRequest(InferRequestBase): :param inputs: Data to be set on input tensors. :type inputs: Union[Dict[keys, values], List[values]], optional - :param userdata: Any data that will be passed inside callback call. + :param userdata: Any data that will be passed inside the callback. :type userdata: Any """ super().start_async( @@ -164,8 +164,8 @@ class CompiledModel(CompiledModelBase): Blocks all methods of CompiledModel while request is running. Method creates new temporary InferRequest and run inference on it. - It is advised to use dedicated InferRequest class for performance, - optimizing workflows and creating advanced pipelines. + It is advised to use a dedicated InferRequest class for performance, + optimizing workflows, and creating advanced pipelines. The allowed types of keys in the `inputs` dictionary are: @@ -188,7 +188,10 @@ class CompiledModel(CompiledModelBase): ) def __call__(self, inputs: Union[dict, list] = None) -> dict: - """Callable infer wrapper for CompiledModel. Look at `infer_new_request` for reference.""" + """Callable infer wrapper for CompiledModel. + + Take a look at `infer_new_request` for reference. + """ return self.infer_new_request(inputs) @@ -245,7 +248,7 @@ class Core(CoreBase): """Core class represents OpenVINO runtime Core entity. 
User applications can create several Core class instances, but in this - case the underlying plugins are created multiple times and not shared + case, the underlying plugins are created multiple times and not shared between several Core instances. The recommended way is to have a single Core instance per application. """ diff --git a/src/bindings/python/src/openvino/runtime/opset1/ops.py b/src/bindings/python/src/openvino/runtime/opset1/ops.py index 347870c2c35..c05fe5159a2 100644 --- a/src/bindings/python/src/openvino/runtime/opset1/ops.py +++ b/src/bindings/python/src/openvino/runtime/opset1/ops.py @@ -2,12 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 """Factory functions for all openvino ops.""" -from typing import Callable, Iterable, List, Optional, Set, Union +from typing import List, Optional, Union import numpy as np from functools import partial -from openvino.runtime import Node, PartialShape, Shape +from openvino.runtime import Node, PartialShape, Type from openvino.runtime.op import Constant, Parameter, tensor_iterator from openvino.runtime.opset_utils import _get_node_factory from openvino.runtime.utils.decorators import binary_op, nameable_op, unary_op @@ -43,7 +43,7 @@ def absolute(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with Abs operation applied on it. + :return: New node with Abs operation applied on it. """ return _get_node_factory_opset1().create("Abs", [node]) @@ -54,7 +54,7 @@ def acos(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with arccos operation applied on it. + :return: New node with arccos operation applied on it. 
""" return _get_node_factory_opset1().create("Acos", [node]) @@ -78,7 +78,7 @@ def asin(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with arcsin operation applied on it. + :return: New node with arcsin operation applied on it. """ return _get_node_factory_opset1().create("Asin", [node]) @@ -89,7 +89,7 @@ def atan(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with arctan operation applied on it. + :return: New node with arctan operation applied on it. """ return _get_node_factory_opset1().create("Atan", [node]) @@ -120,7 +120,7 @@ def avg_pool( [None, 'same_upper', 'same_lower', 'valid'] :param name: Optional name for the new output node. - returns New node with AvgPool operation applied on its data. + :return: New node with AvgPool operation applied on its data. """ if auto_pad is None: auto_pad = "explicit" @@ -159,7 +159,7 @@ def batch_norm_inference( :param epsilon: The number to be added to the variance to avoid division by zero when normalizing a value. :param name: The optional name of the output node. - returns The new node which performs BatchNormInference. + :return: The new node which performs BatchNormInference. """ inputs = as_nodes(gamma, beta, data, mean, variance) return _get_node_factory_opset1().create("BatchNormInference", inputs, {"epsilon": epsilon}) @@ -190,7 +190,7 @@ def binary_convolution( :param pad_value: Floating-point value used to fill pad area. :param auto_pad: The type of padding. Range of values: explicit, same_upper, same_lower, valid. :param name: The optional new name for output node. - returns New node performing binary convolution operation. + :return: New node performing binary convolution operation. 
""" return _get_node_factory_opset1().create( "BinaryConvolution", @@ -224,7 +224,7 @@ def broadcast( :param mode: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: NUMPY, EXPLICIT. :param name: Optional new name for output node. - returns New node with broadcast shape. + :return: New node with broadcast shape. """ inputs = as_nodes(data, target_shape) if mode.upper() == "EXPLICIT": @@ -247,7 +247,7 @@ def ctc_greedy_decoder( :param sequence_mask: The tensor with sequence masks for each sequence in the batch. :param merge_repeated: The flag for merging repeated labels during the CTC calculation. :param name: Optional name for output node. - returns The new node performing an CTCGreedyDecoder operation on input tensor. + :return: The new node performing an CTCGreedyDecoder operation on input tensor. """ node_inputs = as_nodes(data, sequence_mask) return _get_node_factory_opset1().create( @@ -261,7 +261,7 @@ def ceiling(node: NodeInput, name: Optional[str] = None) -> Node: :param node: The node providing data to ceiling operation. :param name: Optional name for output node. - returns The node performing element-wise ceiling. + :return: The node performing element-wise ceiling. """ return _get_node_factory_opset1().create("Ceiling", [node]) @@ -276,7 +276,7 @@ def clamp( :param min_value: The lower bound of the range. Scalar value. :param max_value: The upper bound of the range. Scalar value. :param name: Optional output node name. - returns The new node performing a clamp operation on its input data element-wise. + :return: The new node performing a clamp operation on its input data element-wise. Performs a clipping operation on an input value between a pair of boundary values. @@ -306,19 +306,23 @@ def concat(nodes: List[NodeInput], axis: int, name: Optional[str] = None) -> Nod :param nodes: The nodes we want concatenate into single new node. 
:param axis: The axis along which we want to concatenate input nodes. :param name: The optional new name for output node. - returns Return new node that is a concatenation of input nodes. + :return: Return new node that is a concatenation of input nodes. """ return _get_node_factory_opset1().create("Concat", as_nodes(*nodes), {"axis": axis}) @nameable_op -def constant(value: NumericData, dtype: NumericType = None, name: Optional[str] = None) -> Constant: +def constant( + value: NumericData, + dtype: Union[NumericType, Type] = None, + name: Optional[str] = None, +) -> Constant: """Create a Constant node from provided value. :param value: One of: array of values or scalar to initialize node with. :param dtype: The data type of provided data. :param name: Optional name for output node. - returns The Constant node initialized with provided data. + :return: The Constant node initialized with provided data. """ return make_constant_node(value, dtype) @@ -332,7 +336,7 @@ def convert( :param data: Node which produces the input tensor. :param destination_type: Provides the target type for the conversion. :param name: Optional name for the output node. - returns New node performing the conversion operation. + :return: New node performing the conversion operation. """ if not isinstance(destination_type, str): destination_type = get_element_type_str(destination_type) @@ -348,7 +352,7 @@ def convert_like(data: NodeInput, like: NodeInput, name: Optional[str] = None) - :param data: Node which produces the input tensor :param like: Node which provides the target type information for the conversion :param name: Optional name for the output node. - returns New node performing the conversion operation. + :return: New node performing the conversion operation. """ return _get_node_factory_opset1().create("ConvertLike", [data, like]) @@ -374,7 +378,7 @@ def convolution( :param dilations: The data batch dilation strides. :param auto_pad: The type of padding. 
Range of values: explicit, same_upper, same_lower, valid. :param name: The optional new name for output node. - returns New node performing batched convolution operation. + :return: New node performing batched convolution operation. """ return _get_node_factory_opset1().create( "Convolution", @@ -415,7 +419,7 @@ def convolution_backprop_data( in the filter. :param name: The node name. - returns The node object representing ConvolutionBackpropData operation. + :return: The node object representing ConvolutionBackpropData operation. """ spatial_dim_count = len(strides) if pads_begin is None: @@ -452,7 +456,7 @@ def cos(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with cos operation applied on it. + :return: New node with cos operation applied on it. """ return _get_node_factory_opset1().create("Cos", [node]) @@ -463,7 +467,7 @@ def cosh(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with cosh operation applied on it. + :return: New node with cosh operation applied on it. """ return _get_node_factory_opset1().create("Cosh", [node]) @@ -495,7 +499,7 @@ def deformable_convolution( :param deformable_group: The number of groups which deformable values and output should be split into along the channel axis. :param name: The optional new name for output node. - returns New node performing deformable convolution operation. + :return: New node performing deformable convolution operation. """ return _get_node_factory_opset1().create( "DeformableConvolution", @@ -544,7 +548,7 @@ def deformable_psroi_pooling( :param part_size: The number of parts the output tensor spatial dimensions are divided into. :param offsets: Optional node. 4D input blob with transformation values (offsets). :param name: The optional new name for output node. 
- returns New node performing DeformablePSROIPooling operation. + :return: New node performing DeformablePSROIPooling operation. """ node_inputs = as_nodes(feature_maps, coords) if offsets is not None: @@ -588,7 +592,7 @@ def depth_to_space(node: Node, mode: str, block_size: int = 1, name: str = None) :param block_size: The size of the spatial block of values describing how the tensor's data is to be rearranged. :param name: Optional output node name. - returns The new node performing an DepthToSpace operation on its input tensor. + :return: The new node performing an DepthToSpace operation on its input tensor. """ return _get_node_factory_opset1().create( "DepthToSpace", [node], {"mode": mode, "block_size": block_size}, @@ -614,7 +618,7 @@ def detection_output( :param aux_class_preds: The 2D input tensor with additional class predictions information. :param aux_box_preds: The 2D input tensor with additional box predictions information. :param name: Optional name for the output node. - returns Node representing DetectionOutput operation. + :return: Node representing DetectionOutput operation. Available attributes are: @@ -770,7 +774,7 @@ def divide( :param right_node: The node providing divisor data. :param auto_broadcast: Specifies rules used for auto-broadcasting of input tensors. :param name: Optional name for output node. - returns The node performing element-wise division. + :return: The node performing element-wise division. """ return _get_node_factory_opset1().create( "Divide", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -789,7 +793,7 @@ def elu(data: NodeInput, alpha: NumericType, name: Optional[str] = None) -> Node :param data: Input tensor. One of: input node, array or scalar. :param alpha: Scalar multiplier for negative values. :param name: Optional output node name. - returns The new node performing an ELU operation on its input data element-wise. 
+ :return: The new node performing an ELU operation on its input data element-wise. """ return _get_node_factory_opset1().create("Elu", [as_node(data)], {"alpha": alpha}) @@ -808,7 +812,7 @@ def equal( :param auto_broadcast: The type of broadcasting specifies rules used for auto-broadcasting of input tensors. :param name: The optional name for output new node. - returns The node performing element-wise equality check. + :return: The node performing element-wise equality check. """ return _get_node_factory_opset1().create( "Equal", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -821,7 +825,7 @@ def erf(node: NodeInput, name: Optional[str] = None) -> Node: :param node: The node providing data for operation. :param name: The optional name for new output node. - returns The new node performing element-wise Erf operation. + :return: The new node performing element-wise Erf operation. """ return _get_node_factory_opset1().create("Erf", [node]) @@ -832,7 +836,7 @@ def exp(node: NodeInput, name: Optional[str] = None) -> Node: :param node: The node providing data for operation. :param name: The optional name for new output node. - returns The new node performing natural exponential operation. + :return: The new node performing natural exponential operation. """ return _get_node_factory_opset1().create("Exp", [node]) @@ -858,7 +862,7 @@ def fake_quantize( :param levels: The number of quantization levels. Integer value. :param auto_broadcast: The type of broadcasting specifies rules used for auto-broadcasting of input tensors. - returns New node with quantized value. + :return: New node with quantized value. Input floating point values are quantized into a discrete set of floating point values. @@ -891,7 +895,7 @@ def floor(node: NodeInput, name: Optional[str] = None) -> Node: :param node: The input node providing data. :param name: The optional name for new output node. - returns The node performing element-wise floor operation. 
+ :return: The node performing element-wise floor operation. """ return _get_node_factory_opset1().create("Floor", [node]) @@ -909,7 +913,7 @@ def floor_mod( :param right_node: The second input node for FloorMod operation. :param auto_broadcast: Specifies rules used for auto-broadcasting of input tensors. :param name: Optional name for output node. - returns The node performing element-wise FloorMod operation. + :return: The node performing element-wise FloorMod operation. """ return _get_node_factory_opset1().create( "FloorMod", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -926,7 +930,7 @@ def gather( :param indices: Tensor with indexes to gather. :param axis: The dimension index to gather data from. :param name: Optional name for output node. - returns The new node performing a Gather operation on the data input tensor. + :return: The new node performing a Gather operation on the data input tensor. """ node_inputs = as_nodes(data, indices, axis) return _get_node_factory_opset1().create("Gather", node_inputs) @@ -947,7 +951,7 @@ def gather_tree( :param max_seq_len: The tensor with maximum lengths for each sequence in the batch. :param end_token: The scalar tensor with value of the end marker in a sequence. :param name: Optional name for output node. - returns The new node performing a GatherTree operation. + :return: The new node performing a GatherTree operation. The GatherTree node generates the complete beams from the indices per each step and the parent beam indices. @@ -984,7 +988,7 @@ def greater( :param auto_broadcast: The type of broadcasting specifies rules used for auto-broadcasting of input tensors. :param name: The optional new name for output node. - returns The node performing element-wise check whether left_node is greater than right_node. + :return: The node performing element-wise check whether left_node is greater than right_node. 
""" return _get_node_factory_opset1().create( "Greater", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -1005,7 +1009,7 @@ def greater_equal( :param auto_broadcast: The type of broadcasting specifies rules used for auto-broadcasting of input tensors. :param name: The optional new name for output node. - returns The node performing element-wise check whether left_node is greater than or equal + :return: The node performing element-wise check whether left_node is greater than or equal right_node. """ return _get_node_factory_opset1().create( @@ -1023,7 +1027,7 @@ def grn(data: Node, bias: float, name: Optional[str] = None) -> Node: :param data: The node with data tensor. :param bias: The bias added to the variance. Scalar value. :param name: Optional output node name. - returns The new node performing a GRN operation on tensor's channels. + :return: The new node performing a GRN operation on tensor's channels. """ return _get_node_factory_opset1().create("GRN", [data], {"bias": bias}) @@ -1058,7 +1062,7 @@ def group_convolution( Ceil(num_dims/2) at the end VALID: No padding :param name: Optional output node name. - returns The new node performing a Group Convolution operation on tensor from input node. + :return: The new node performing a Group Convolution operation on tensor from input node. """ return _get_node_factory_opset1().create( "GroupConvolution", @@ -1109,7 +1113,7 @@ def group_convolution_backprop_data( :param output_padding: The additional amount of paddings added per each spatial axis in the output tensor. :param name: Optional output node name. - returns The new node performing a Group Convolution operation on tensor from input node. + :return: The new node performing a Group Convolution operation on tensor from input node. 
""" spatial_dim_count = len(strides) if dilations is None: @@ -1146,7 +1150,7 @@ def hard_sigmoid(data: Node, alpha: NodeInput, beta: NodeInput, name: Optional[s :param alpha: A node producing the alpha parameter. :param beta: A node producing the beta parameter :param name: Optional output node name. - returns The new node performing a Hard Sigmoid element-wise on input tensor. + :return: The new node performing a Hard Sigmoid element-wise on input tensor. Hard Sigmoid uses the following logic: @@ -1167,7 +1171,7 @@ def interpolate( :param output_shape: 1D tensor describing output shape for spatial axes. :param attrs: The dictionary containing key, value pairs for attributes. :param name: Optional name for the output node. - returns Node representing interpolation operation. + :return: Node representing interpolation operation. Available attributes are: @@ -1247,7 +1251,7 @@ def less( :param auto_broadcast: The type of broadcasting specifies rules used for auto-broadcasting of input tensors. :param name: The optional new name for output node. - returns The node performing element-wise check whether left_node is less than the right_node. + :return: The node performing element-wise check whether left_node is less than the right_node. """ return _get_node_factory_opset1().create( "Less", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -1268,7 +1272,7 @@ def less_equal( :param auto_broadcast: The type of broadcasting specifies rules used for auto-broadcasting of input tensors. :param name: The optional new name for output node. - returns The node performing element-wise check whether left_node is less than or equal the + :return: The node performing element-wise check whether left_node is less than or equal the right_node. """ return _get_node_factory_opset1().create( @@ -1282,7 +1286,7 @@ def log(node: NodeInput, name: Optional[str] = None) -> Node: :param node: The input node providing data for operation. 
:param name: The optional new name for output node. - returns The new node performing log operation element-wise. + :return: The new node performing log operation element-wise. """ return _get_node_factory_opset1().create("Log", [node]) @@ -1301,7 +1305,7 @@ def logical_and( :param auto_broadcast: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: numpy, explicit. :param name: The optional new name for output node. - returns The node performing logical and operation on input nodes corresponding elements. + :return: The node performing logical and operation on input nodes corresponding elements. """ return _get_node_factory_opset1().create( "LogicalAnd", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -1314,7 +1318,7 @@ def logical_not(node: NodeInput, name: Optional[str] = None) -> Node: :param node: The input node providing data. :param name: The optional new name for output node. - returns The node performing element-wise logical NOT operation with given tensor. + :return: The node performing element-wise logical NOT operation with given tensor. """ return _get_node_factory_opset1().create("LogicalNot", [node]) @@ -1333,7 +1337,7 @@ def logical_or( :param auto_broadcast: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: numpy, explicit. :param name: The optional new name for output node. - returns The node performing logical or operation on input nodes corresponding elements. + :return: The node performing logical or operation on input nodes corresponding elements. """ return _get_node_factory_opset1().create( "LogicalOr", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -1354,7 +1358,7 @@ def logical_xor( :param auto_broadcast: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: numpy, explicit. :param name: The optional new name for output node. 
- returns The node performing logical or operation on input nodes corresponding elements. + :return: The node performing logical XOR operation on input nodes corresponding elements. """ return _get_node_factory_opset1().create( "LogicalXor", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -1379,7 +1383,7 @@ def lrn( :param bias: An offset (usually positive) to avoid dividing by 0. :param size: Width of the 1-D normalization window. :param name: An optional name of the output node. - returns The new node which performs LRN. + :return: The new node which performs LRN. """ attributes = {"alpha": alpha, "beta": beta, "bias": bias, "size": size} return _get_node_factory_opset1().create("LRN", as_nodes(data, axes), attributes) @@ -1415,7 +1419,7 @@ def lstm_cell( :param clip: Specifies bound values [-C, C] for tensor clipping performed before activations. :param name: An optional name of the output node. - returns The new node represents LSTMCell. Node outputs count: 2. + :return: The new node represents LSTMCell. Node outputs count: 2. """ if activations is None: activations = ["sigmoid", "tanh", "tanh"] @@ -1489,7 +1493,7 @@ def lstm_sequence( :param clip: Specifies bound values [-C, C] for tensor clipping performed before activations. :param name: An optional name of the output node. - returns The new node represents LSTMSequence. Node outputs count: 3. + :return: The new node represents LSTMSequence. Node outputs count: 3. 
""" if activations is None: activations = ["sigmoid", "tanh", "tanh"] @@ -1542,9 +1546,8 @@ def matmul( :param data_b: right-hand side matrix :param transpose_a: should the first matrix be transposed before operation :param transpose_b: should the second matrix be transposed - returns MatMul operation node + :return: MatMul operation node """ - print("transpose_a", transpose_a, "transpose_b", transpose_b) return _get_node_factory_opset1().create( "MatMul", as_nodes(data_a, data_b), {"transpose_a": transpose_a, "transpose_b": transpose_b} ) @@ -1575,7 +1578,7 @@ def max_pool( [None, 'same_upper', 'same_lower', 'valid'] :param name: The optional name for the created output node. - returns The new node performing max pooling operation. + :return: The new node performing max pooling operation. """ if auto_pad is None: auto_pad = "explicit" @@ -1632,7 +1635,7 @@ def mod( :param right_node: The second input node for mod operation. :param auto_broadcast: Specifies rules used for auto-broadcasting of input tensors. :param name: Optional name for output node. - returns The node performing element-wise Mod operation. + :return: The node performing element-wise Mod operation. """ return _get_node_factory_opset1().create( "Mod", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -1680,7 +1683,7 @@ def non_max_suppression( :param box_encoding: Format of boxes data encoding. Range of values: corner or cente. :param sort_result_descending: Flag that specifies whenever it is necessary to sort selected boxes across batches or not. 
- returns The new node which performs NonMaxSuppression + :return: The new node which performs NonMaxSuppression """ if max_output_boxes_per_class is None: max_output_boxes_per_class = make_constant_node(0, np.int64) @@ -1708,7 +1711,7 @@ def normalize_l2( :param axes: Node indicating axes along which L2 reduction is calculated :param eps: The epsilon added to L2 norm :param eps_mode: how eps is combined with L2 value (`add` or `max`) - returns New node which performs the L2 normalization. + :return: New node which performs the L2 normalization. """ return _get_node_factory_opset1().create( "NormalizeL2", as_nodes(data, axes), {"eps": eps, "mode": eps_mode} @@ -1729,7 +1732,7 @@ def not_equal( :param auto_broadcast: The type of broadcasting specifies rules used for auto-broadcasting of input tensors. :param name: The optional name for output new node. - returns The node performing element-wise inequality check. + :return: The node performing element-wise inequality check. """ return _get_node_factory_opset1().create( "NotEqual", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -1756,7 +1759,7 @@ def one_hot( by indices in input take. :param name: The optional name for new output node. - returns New node performing one-hot operation. + :return: New node performing one-hot operation. """ return _get_node_factory_opset1().create( "OneHot", as_nodes(indices, depth, on_value, off_value), {"axis": axis} @@ -1780,7 +1783,7 @@ def pad( :param pads_end: number of padding elements to be added after the last element. :param pad_mode: "constant", "edge", "reflect" or "symmetric" :param arg_pad_value: value used for padding if pad_mode is "constant" - returns Pad operation node. + :return: Pad operation node. 
""" input_nodes = as_nodes(arg, pads_begin, pads_end) if arg_pad_value: @@ -1792,11 +1795,13 @@ def pad( @nameable_op def parameter( - shape: TensorShape, dtype: NumericType = np.float32, name: Optional[str] = None + shape: TensorShape, dtype: Union[NumericType, Type] = np.float32, name: Optional[str] = None ) -> Parameter: """Return an openvino Parameter object.""" - element_type = get_element_type(dtype) - return Parameter(element_type, PartialShape(shape)) + return Parameter(get_element_type(dtype) + if isinstance(dtype, (type, np.dtype)) + else dtype, + PartialShape(shape)) @binary_op @@ -1813,7 +1818,7 @@ def power( :param name: The optional name for the new output node. :param auto_broadcast: The type of broadcasting specifies rules used for auto-broadcasting of input tensors. - returns The new node performing element-wise exponentiation operation on input nodes. + :return: The new node performing element-wise exponentiation operation on input nodes. """ return _get_node_factory_opset1().create( "Power", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -1827,7 +1832,7 @@ def prelu(data: NodeInput, slope: NodeInput, name: Optional[str] = None) -> Node :param data: The node with data tensor. :param slope: The node with the multipliers for negative values. :param name: Optional output node name. - returns The new node performing a PRelu operation on tensor's channels. + :return: The new node performing a PRelu operation on tensor's channels. PRelu uses the following logic: @@ -1853,7 +1858,7 @@ def prior_box_clustered( specifies shape of the image for which boxes are generated. :param attrs: The dictionary containing key, value pairs for attributes. :param name: Optional name for the output node. - returns Node representing PriorBoxClustered operation. + :return: Node representing PriorBoxClustered operation. Available attributes are: @@ -1937,7 +1942,7 @@ def prior_box( :param image_shape: Shape of image to which prior boxes are scaled. 
:param attrs: The dictionary containing key, value pairs for attributes. :param name: Optional name for the output node. - returns Node representing prior box operation. + :return: Node representing prior box operation. Available attributes are: @@ -2057,7 +2062,7 @@ def proposal( :param image_shape: The 1D input tensor with 3 or 4 elements describing image shape. :param attrs: The dictionary containing key, value pairs for attributes. :param name: Optional name for the output node. - returns Node representing Proposal operation. + :return: Node representing Proposal operation. * base_size The size of the anchor to which scale and ratio attributes are applied. Range of values: a positive unsigned integer number @@ -2191,15 +2196,15 @@ def psroi_pooling( ) -> Node: """Return a node which produces a PSROIPooling operation. - :param input: Input feature map {N, C, ...} - :param coords: Coordinates of bounding boxes - :param output_dim: Output channel number - :param group_size: Number of groups to encode position-sensitive scores - :param spatial_scale: Ratio of input feature map over input image size - :param spatial_bins_x: Numbers of bins to divide the input feature maps over - :param spatial_bins_y: Numbers of bins to divide the input feature maps over - :param mode: Mode of pooling - "avg" or "bilinear" - returns PSROIPooling node + :param input: Input feature map `{N, C, ...}`. + :param coords: Coordinates of bounding boxes. + :param output_dim: Output channel number. + :param group_size: Number of groups to encode position-sensitive scores. + :param spatial_scale: Ratio of input feature map over input image size. + :param spatial_bins_x: Numbers of bins to divide the input feature maps over. + :param spatial_bins_y: Numbers of bins to divide the input feature maps over. + :param mode: Mode of pooling - "avg" or "bilinear". 
+ :return: PSROIPooling node """ mode = mode.lower() return _get_node_factory_opset1().create( @@ -2220,11 +2225,11 @@ def psroi_pooling( def range(start: Node, stop: NodeInput, step: NodeInput, name: Optional[str] = None) -> Node: """Return a node which produces the Range operation. - :param start: The start value of the generated range - :param stop: The stop value of the generated range - :param step: The step value for the generated range + :param start: The start value of the generated range. + :param stop: The stop value of the generated range. + :param step: The step value for the generated range. :param name: Optional name for output node. - returns Range node + :return: Range node """ return _get_node_factory_opset1().create("Range", as_nodes(start, stop, step)) @@ -2235,7 +2240,7 @@ def relu(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: The optional output node name. - returns The new node performing relu operation on its input element-wise. + :return: The new node performing relu operation on its input element-wise. """ return _get_node_factory_opset1().create("Relu", [node]) @@ -2248,9 +2253,9 @@ def reduce_logical_and( :param node: The tensor we want to reduce. :param reduction_axes: The axes to eliminate through AND operation. - :param keep_dims: If set to True it holds axes that are used for reduction + :param keep_dims: If set to True it holds axes that are used for reduction. :param name: Optional name for output node. - returns The new node performing reduction operation. + :return: The new node performing reduction operation. """ return _get_node_factory_opset1().create( "ReduceLogicalAnd", as_nodes(node, reduction_axes), {"keep_dims": keep_dims} @@ -2265,9 +2270,9 @@ def reduce_logical_or( :param node: The tensor we want to reduce. :param reduction_axes: The axes to eliminate through OR operation. 
- :param keep_dims: If set to True it holds axes that are used for reduction + :param keep_dims: If set to True it holds axes that are used for reduction. :param name: Optional name for output node. - returns The new node performing reduction operation. + :return: The new node performing reduction operation. """ return _get_node_factory_opset1().create( "ReduceLogicalOr", as_nodes(node, reduction_axes), {"keep_dims": keep_dims} @@ -2282,7 +2287,7 @@ def reduce_max( :param node: The tensor we want to max-reduce. :param reduction_axes: The axes to eliminate through max operation. - :param keep_dims: If set to True it holds axes that are used for reduction + :param keep_dims: If set to True it holds axes that are used for reduction. :param name: Optional name for output node. """ return _get_node_factory_opset1().create( @@ -2298,9 +2303,9 @@ def reduce_mean( :param node: The tensor we want to mean-reduce. :param reduction_axes: The axes to eliminate through mean operation. - :param keep_dims: If set to True it holds axes that are used for reduction + :param keep_dims: If set to True it holds axes that are used for reduction. :param name: Optional name for output node. - returns The new node performing mean-reduction operation. + :return: The new node performing mean-reduction operation. """ return _get_node_factory_opset1().create( "ReduceMean", as_nodes(node, reduction_axes), {"keep_dims": keep_dims} @@ -2333,7 +2338,7 @@ def reduce_prod( :param reduction_axes: The axes to eliminate through product operation. :param keep_dims: If set to True it holds axes that are used for reduction :param name: Optional name for output node. - returns The new node performing product-reduction operation. + :return: The new node performing product-reduction operation. """ return _get_node_factory_opset1().create( "ReduceProd", as_nodes(node, reduction_axes), {"keep_dims": keep_dims} @@ -2350,7 +2355,7 @@ def reduce_sum( :param reduction_axes: The axes to eliminate through summation. 
:param keep_dims: If set to True it holds axes that are used for reduction :param name: The optional new name for output node. - returns The new node performing summation along `reduction_axes` element-wise. + :return: The new node performing summation along `reduction_axes` element-wise. """ return _get_node_factory_opset1().create( "ReduceSum", as_nodes(node, reduction_axes), {"keep_dims": keep_dims} @@ -2382,7 +2387,7 @@ def region_yolo( :param end_axis: Axis to end softmax on :param anchors: A flattened list of pairs `[width, height]` that describes prior box sizes :param name: Optional name for output node. - returns RegionYolo node + :return: RegionYolo node """ if anchors is None: anchors = [] @@ -2429,7 +2434,7 @@ def result(data: NodeInput, name: Optional[str] = None) -> Node: """Return a node which represents an output of a graph (Model). :param data: The tensor containing the input data - returns Result node + :return: Result node """ return _get_node_factory_opset1().create("Result", [data]) @@ -2448,7 +2453,7 @@ def reverse_sequence( :param seq_lengths: 1D tensor of integers with sequence lengths in the input tensor. :param batch_axis: index of the batch dimension. :param seq_axis: index of the sequence dimension. - returns ReverseSequence node + :return: ReverseSequence node """ return _get_node_factory_opset1().create( "ReverseSequence", @@ -2474,7 +2479,7 @@ def select( item value is `False`. :param auto_broadcast: Mode specifies rules used for auto-broadcasting of input tensors. :param name: The optional new name for output node. - returns The new node with values selected according to provided arguments. + :return: The new node with values selected according to provided arguments. 
""" inputs = as_nodes(cond, then_node, else_node) return _get_node_factory_opset1().create( @@ -2494,7 +2499,7 @@ def selu( :param alpha: Alpha coefficient of SELU operation :param lambda_value: Lambda coefficient of SELU operation :param name: The optional output node name. - returns The new node performing relu operation on its input element-wise. + :return: The new node performing SELU operation on its input element-wise. """ return _get_node_factory_opset1().create("Selu", as_nodes(data, alpha, lambda_value)) @@ -2504,7 +2509,7 @@ def shape_of(data: NodeInput, name: Optional[str] = None) -> Node: """Return a node which produces a tensor containing the shape of its input data. :param data: The tensor containing the input data. - returns ShapeOf node + :return: ShapeOf node """ return _get_node_factory_opset1().create("ShapeOf", [as_node(data)]) @@ -2514,7 +2519,7 @@ def sigmoid(data: NodeInput, name: Optional[str] = None) -> Node: """Return a node which applies the sigmoid function element-wise. :param data: The tensor containing the input data - returns Sigmoid node + :return: Sigmoid node """ return _get_node_factory_opset1().create("Sigmoid", [data]) @@ -2525,7 +2530,7 @@ def sign(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: The optional new name for output node. - returns The node with mapped elements of the input tensor to -1 (if it is negative), + :return: The node with mapped elements of the input tensor to -1 (if it is negative), 0 (if it is zero), or 1 (if it is positive). """ return _get_node_factory_opset1().create("Sign", [node]) @@ -2537,7 +2542,7 @@ def sin(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with sin operation applied on it. + :return: New node with sin operation applied on it. 
""" return _get_node_factory_opset1().create("Sin", [node]) @@ -2548,7 +2553,7 @@ def sinh(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with sin operation applied on it. + :return: New node with sinh operation applied on it. """ return _get_node_factory_opset1().create("Sinh", [node]) @@ -2559,7 +2564,7 @@ def softmax(data: NodeInput, axis: int, name: Optional[str] = None) -> Node: :param data: The tensor providing input data. :param axis: An axis along which Softmax should be calculated - returns The new node with softmax operation applied on each element. + :return: The new node with softmax operation applied on each element. """ return _get_node_factory_opset1().create("Softmax", [as_node(data)], {"axis": axis}) @@ -2569,7 +2574,7 @@ def space_to_depth(data: Node, mode: str, block_size: int = 1, name: str = None) """Perform SpaceToDepth operation on the input tensor. SpaceToDepth rearranges blocks of spatial data into depth. - The operator returns a copy of the input tensor where values from the height + The operator returns a copy of the input tensor where values from the height and width dimensions are moved to the depth dimension. :param data: The node with data tensor. @@ -2580,7 +2585,7 @@ def space_to_depth(data: Node, mode: str, block_size: int = 1, name: str = None) :param block_size: The size of the block of values to be moved. Scalar value. :param name: Optional output node name. - returns The new node performing a SpaceToDepth operation on input tensor. + :return: The new node performing a SpaceToDepth operation on input tensor. 
""" return _get_node_factory_opset1().create( "SpaceToDepth", [data], {"mode": mode, "block_size": block_size}, @@ -2594,7 +2599,7 @@ def split(data: NodeInput, axis: NodeInput, num_splits: int, name: Optional[str] :param data: The input tensor to be split :param axis: Axis along which the input data will be split :param num_splits: Number of the output tensors that should be produced - returns Split node + :return: Split node """ return _get_node_factory_opset1().create( "Split", @@ -2609,7 +2614,7 @@ def sqrt(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns The new node with sqrt operation applied element-wise. + :return: The new node with sqrt operation applied element-wise. """ return _get_node_factory_opset1().create("Sqrt", [node]) @@ -2627,7 +2632,7 @@ def squared_difference( :param auto_broadcast: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: numpy, explicit. :param name: Optional new name for output node. - returns The new node performing a squared difference between two tensors. + :return: The new node performing a squared difference between two tensors. """ return _get_node_factory_opset1().create( "SquaredDifference", [x1, x2], {"auto_broadcast": auto_broadcast.upper()} @@ -2642,7 +2647,7 @@ def squeeze(data: NodeInput, axes: NodeInput, name: Optional[str] = None) -> Nod :param axes: List of non-negative integers, indicate the dimensions to squeeze. One of: input node or array. :param name: Optional new name for output node. - returns The new node performing a squeeze operation on input tensor. + :return: The new node performing a squeeze operation on input tensor. Remove single-dimensional entries from the shape of a tensor. Takes a parameter `axes` with a list of axes to squeeze. 
@@ -2685,7 +2690,7 @@ def strided_slice( :param new_axis_mask: A mask indicating dimensions where '1' should be inserted :param shrink_axis_mask: A mask indicating which dimensions should be deleted :param ellipsis_mask: Indicates positions where missing dimensions should be inserted - returns StridedSlice node + :return: StridedSlice node """ if new_axis_mask is None: new_axis_mask = [] @@ -2720,7 +2725,7 @@ def subtract( :param auto_broadcast: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: numpy, explicit. :param name: The optional name for output node. - returns The new output node performing subtraction operation on both tensors element-wise. + :return: The new output node performing subtraction operation on both tensors element-wise. """ return _get_node_factory_opset1().create( "Subtract", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} @@ -2733,7 +2738,7 @@ def tan(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with tan operation applied on it. + :return: New node with tan operation applied on it. """ return _get_node_factory_opset1().create("Tan", [node]) diff --git a/src/bindings/python/src/openvino/runtime/opset2/ops.py b/src/bindings/python/src/openvino/runtime/opset2/ops.py index 9d863962c8d..c833e3a31ea 100644 --- a/src/bindings/python/src/openvino/runtime/opset2/ops.py +++ b/src/bindings/python/src/openvino/runtime/opset2/ops.py @@ -54,7 +54,7 @@ def batch_to_space( :param crops_begin: Specifies the amount to crop from the beginning along each axis of `data`. :param crops_end: Specifies the amount to crop from the end along each axis of `data`. :param name: Optional output node name. - returns The new node performing a BatchToSpace operation. + :return: The new node performing a BatchToSpace operation. 
""" return _get_node_factory_opset2().create( "BatchToSpace", as_nodes(data, block_shape, crops_begin, crops_end) @@ -73,7 +73,7 @@ def gelu(node: NodeInput, name: Optional[str] = None) -> Node: :param node: Input tensor. One of: input node, array or scalar. :param name: Optional output node name. - returns The new node performing a GELU operation on its input data element-wise. + :return: The new node performing a GELU operation on its input data element-wise. """ return _get_node_factory_opset2().create("Gelu", [node]) @@ -96,9 +96,9 @@ def mvn( :param across_channels: Denotes if mean values are shared across channels. :param normalize_variance: Denotes whether to perform variance normalization. :param eps: The number added to the variance to avoid division by zero - when normalizing the value. Scalar value. + when normalizing the value. Scalar value. :param name: Optional output node name. - returns The new node performing a MVN operation on input tensor. + :return: The new node performing a MVN operation on input tensor. """ return _get_node_factory_opset2().create( "MVN", @@ -111,10 +111,10 @@ def mvn( def reorg_yolo(input: Node, stride: List[int], name: Optional[str] = None) -> Node: """Return a node which produces the ReorgYolo operation. - :param input: Input data - :param stride: Stride to reorganize input by + :param input: Input data. + :param stride: Stride to reorganize input by. :param name: Optional name for output node. - returns ReorgYolo node + :return: ReorgYolo node. """ return _get_node_factory_opset2().create("ReorgYolo", [input], {"stride": stride}) @@ -130,12 +130,12 @@ def roi_pooling( ) -> Node: """Return a node which produces an ROIPooling operation. 
- :param input: Input feature map {N, C, ...} - :param coords: Coordinates of bounding boxes - :param output_size: Height/Width of ROI output features (shape) - :param spatial_scale: Ratio of input feature map over input image size (float) - :param method: Method of pooling - string: "max" or "bilinear" - returns ROIPooling node + :param input: Input feature map `{N, C, ...}`. + :param coords: Coordinates of bounding boxes. + :param output_size: Height/Width of ROI output features (shape). + :param spatial_scale: Ratio of input feature map over input image size (float). + :param method: Method of pooling - string: "max" or "bilinear". + :return: ROIPooling node. """ method = method.lower() return _get_node_factory_opset2().create( @@ -164,7 +164,7 @@ def space_to_batch( :param pads_begin: Specifies the padding for the beginning along each axis of `data`. :param pads_end: Specifies the padding for the ending along each axis of `data`. :param name: Optional output node name. - returns The new node performing a SpaceToBatch operation. + :return: The new node performing a SpaceToBatch operation. """ return _get_node_factory_opset2().create( "SpaceToBatch", as_nodes(data, block_shape, pads_begin, pads_end) diff --git a/src/bindings/python/src/openvino/runtime/opset3/ops.py b/src/bindings/python/src/openvino/runtime/opset3/ops.py index 6afefdf20fb..47f745228ae 100644 --- a/src/bindings/python/src/openvino/runtime/opset3/ops.py +++ b/src/bindings/python/src/openvino/runtime/opset3/ops.py @@ -44,7 +44,7 @@ def assign(new_value: NodeInput, variable_id: str, name: Optional[str] = None) - :param new_value: Node producing a value to be assigned to a variable. :param variable_id: Id of a variable to be updated. :param name: Optional name for output node. 
- returns Assign node + :return: Assign node """ return _get_node_factory_opset3().create( "Assign", @@ -70,7 +70,7 @@ def broadcast( :param broadcast_spec: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: NUMPY, EXPLICIT, BIDIRECTIONAL. :param name: Optional new name for output node. - returns New node with broadcast shape. + :return: New node with broadcast shape. """ inputs = as_nodes(data, target_shape) if broadcast_spec.upper() == "EXPLICIT": @@ -96,7 +96,7 @@ def bucketize( :param with_right_bound: indicates whether bucket includes the right or left edge of interval. default true = includes right edge :param name: Optional name for output node. - returns Bucketize node + :return: Bucketize node """ return _get_node_factory_opset3().create( "Bucketize", @@ -119,7 +119,7 @@ def cum_sum( :param axis: zero dimension tensor specifying axis position along which sum will be performed. :param exclusive: if set to true, the top element is not included :param reverse: if set to true, will perform the sums in reverse direction - returns New node performing the operation + :return: New node performing the operation """ return _get_node_factory_opset3().create( "CumSum", as_nodes(arg, axis), {"exclusive": exclusive, "reverse": reverse} @@ -143,7 +143,7 @@ def embedding_bag_offsets_sum( :param per_sample_weights: Tensor with weights for each sample. :param default_index: Scalar containing default index in embedding table to fill empty bags. :param name: Optional name for output node. - returns The new node which performs EmbeddingBagOffsetsSum + :return: The new node which performs EmbeddingBagOffsetsSum """ inputs = [emb_table, as_node(indices), as_node(offsets)] if per_sample_weights is not None: @@ -171,7 +171,7 @@ def embedding_bag_packed_sum( :param indices: Tensor with indices. :param per_sample_weights: Weights to be multiplied with embedding table. :param name: Optional name for output node. 
- returns EmbeddingBagPackedSum node + :return: EmbeddingBagPackedSum node """ inputs = [as_node(emb_table), as_node(indices)] if per_sample_weights is not None: @@ -202,7 +202,7 @@ def embedding_segments_sum( :param default_index: Scalar containing default index in embedding table to fill empty bags. :param per_sample_weights: Weights to be multiplied with embedding table. :param name: Optional name for output node. - returns EmbeddingSegmentsSum node + :return: EmbeddingSegmentsSum node """ inputs = [as_node(emb_table), as_node(indices), as_node(segment_ids)] if per_sample_weights is not None: @@ -235,7 +235,7 @@ def extract_image_patches( :param rates: Element seleciton rate for creating a patch. :param auto_pad: Padding type. :param name: Optional name for output node. - returns ExtractImagePatches node + :return: ExtractImagePatches node """ return _get_node_factory_opset3().create( "ExtractImagePatches", @@ -288,7 +288,7 @@ def gru_cell( :param linear_before_reset: Flag denotes if the layer behaves according to the modification of GRUCell described in the formula in the ONNX documentation. :param name: Optional output node name. - returns The new node performing a GRUCell operation on tensor from input node. + :return: The new node performing a GRUCell operation on tensor from input node. """ if activations is None: activations = ["sigmoid", "tanh"] @@ -333,7 +333,7 @@ def non_max_suppression( :param sort_result_descending: Flag that specifies whenever it is necessary to sort selected boxes across batches or not. :param output_type: Output element type. - returns The new node which performs NonMaxSuppression + :return: The new node which performs NonMaxSuppression """ if max_output_boxes_per_class is None: max_output_boxes_per_class = make_constant_node(0, np.int64) @@ -359,7 +359,7 @@ def non_zero(data: NodeInput, output_type: str = "i64", name: Optional[str] = No :param data: Input data. :param output_type: Output tensor type. 
- returns The new node which performs NonZero + :return: The new node which performs NonZero """ return _get_node_factory_opset3().create( "NonZero", @@ -375,7 +375,7 @@ def read_value(init_value: NodeInput, variable_id: str, name: Optional[str] = No :param init_value: Node producing a value to be returned instead of an unassigned variable. :param variable_id: Id of a variable to be read. :param name: Optional name for output node. - returns ReadValue node + :return: ReadValue node """ return _get_node_factory_opset3().create( "ReadValue", @@ -422,7 +422,7 @@ def rnn_cell( :param clip: The value defining clipping range [-clip, clip] on input of activation functions. :param name: Optional output node name. - returns The new node performing a RNNCell operation on tensor from input node. + :return: The new node performing a RNNCell operation on tensor from input node. """ if activations is None: activations = ["tanh"] @@ -467,7 +467,7 @@ def roi_align( :param spatial_scale: Multiplicative spatial scale factor to translate ROI coordinates. :param mode: Method to perform pooling to produce output feature map elements. - returns The new node which performs ROIAlign + :return: The new node which performs ROIAlign """ inputs = as_nodes(data, rois, batch_indices) attributes = { @@ -494,7 +494,7 @@ def scatter_elements_update( :param indices: The tensor with indexes which will be updated. :param updates: The tensor with update values. :param axis: The axis for scatter. - returns ScatterElementsUpdate node + :return: ScatterElementsUpdate node ScatterElementsUpdate creates a copy of the first input tensor with updated elements specified with second and third input tensors. @@ -523,7 +523,7 @@ def scatter_update( :param indices: The tensor with indexes which will be updated. :param updates: The tensor with update values. :param axis: The axis at which elements will be updated. 
- returns ScatterUpdate node + :return: ScatterUpdate node """ return _get_node_factory_opset3().create( "ScatterUpdate", @@ -537,7 +537,7 @@ def shape_of(data: NodeInput, output_type: str = "i64", name: Optional[str] = No :param data: The tensor containing the input data. :param output_type: Output element type. - returns ShapeOf node + :return: ShapeOf node """ return _get_node_factory_opset3().create( "ShapeOf", @@ -557,7 +557,7 @@ def shuffle_channels(data: Node, axis: int, group: int, name: Optional[str] = No :param group: The channel dimension specified by the axis parameter should be split into this number of groups. :param name: Optional output node name. - returns The new node performing a permutation on data in the channel dimension + :return: The new node performing a permutation on data in the channel dimension of the input tensor. The operation is the equivalent with the following transformation of the input tensor @@ -617,7 +617,7 @@ def topk( :param mode: Compute TopK largest ('max') or smallest ('min') :param sort: Order of output elements (sort by: 'none', 'index' or 'value') :param index_element_type: Type of output tensor with indices. - returns The new node which performs TopK (both indices and values) + :return: The new node which performs TopK (both indices and values) """ return _get_node_factory_opset3().create( "TopK", diff --git a/src/bindings/python/src/openvino/runtime/opset4/ops.py b/src/bindings/python/src/openvino/runtime/opset4/ops.py index 8ee3ffef925..3c16762e549 100644 --- a/src/bindings/python/src/openvino/runtime/opset4/ops.py +++ b/src/bindings/python/src/openvino/runtime/opset4/ops.py @@ -59,7 +59,7 @@ def ctc_loss( :param preprocess_collapse_repeated: Flag for preprocessing labels before loss calculation. :param ctc_merge_repeated: Flag for merging repeated characters in a potential alignment. :param unique: Flag to find unique elements in a target. 
- returns The new node which performs CTCLoss + :return: The new node which performs CTCLoss """ if blank_index is not None: inputs = as_nodes(logits, logit_length, labels, label_length, blank_index) @@ -99,7 +99,7 @@ def non_max_suppression( :param sort_result_descending: Flag that specifies whenever it is necessary to sort selected boxes across batches or not. :param output_type: Output element type. - returns The new node which performs NonMaxSuppression + :return: The new node which performs NonMaxSuppression """ if max_output_boxes_per_class is None: max_output_boxes_per_class = make_constant_node(0, np.int64) @@ -123,7 +123,7 @@ def softplus(data: NodeInput, name: Optional[str] = None) -> Node: """Apply SoftPlus operation on each element of input tensor. :param data: The tensor providing input data. - returns The new node with SoftPlus operation applied on each element. + :return: The new node with SoftPlus operation applied on each element. """ return _get_node_factory_opset4().create("SoftPlus", as_nodes(data), {}) @@ -133,7 +133,7 @@ def mish(data: NodeInput, name: Optional[str] = None,) -> Node: """Return a node which performs Mish. :param data: Tensor with input data floating point type. - returns The new node which performs Mish + :return: The new node which performs Mish """ return _get_node_factory_opset4().create("Mish", as_nodes(data), {}) @@ -143,7 +143,7 @@ def hswish(data: NodeInput, name: Optional[str] = None,) -> Node: """Return a node which performs HSwish (hard version of Swish). :param data: Tensor with input data floating point type. - returns The new node which performs HSwish + :return: The new node which performs HSwish """ return _get_node_factory_opset4().create("HSwish", as_nodes(data), {}) @@ -157,7 +157,7 @@ def swish( """Return a node which performing Swish activation function Swish(x, beta=1.0) = x * sigmoid(x * beta)). :param data: Tensor with input data floating point type. 
- returns The new node which performs Swish + :return: The new node which performs Swish """ if beta is None: beta = make_constant_node(1.0, np.float32) @@ -170,7 +170,7 @@ def acosh(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with arccosh operation applied on it. + :return: New node with arccosh operation applied on it. """ return _get_node_factory_opset4().create("Acosh", [node]) @@ -181,7 +181,7 @@ def asinh(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with arcsinh operation applied on it. + :return: New node with arcsinh operation applied on it. """ return _get_node_factory_opset4().create("Asinh", [node]) @@ -192,7 +192,7 @@ def atanh(node: NodeInput, name: Optional[str] = None) -> Node: :param node: One of: input node, array or scalar. :param name: Optional new name for output node. - returns New node with arctanh operation applied on it. + :return: New node with arctanh operation applied on it. """ return _get_node_factory_opset4().create("Atanh", [node]) @@ -292,7 +292,7 @@ def proposal( } Optional attributes which are absent from dictionary will be set with corresponding default. - returns Node representing Proposal operation. + :return: Node representing Proposal operation. """ requirements = [ ("base_size", True, np.unsignedinteger, is_positive_value), @@ -328,7 +328,7 @@ def reduce_l1( :param reduction_axes: The axes to eliminate through mean operation. :param keep_dims: If set to True it holds axes that are used for reduction :param name: Optional name for output node. - returns The new node performing mean-reduction operation. + :return: The new node performing mean-reduction operation. 
""" return _get_node_factory_opset4().create( "ReduceL1", as_nodes(node, reduction_axes), {"keep_dims": keep_dims} @@ -345,7 +345,7 @@ def reduce_l2( :param reduction_axes: The axes to eliminate through mean operation. :param keep_dims: If set to True it holds axes that are used for reduction :param name: Optional name for output node. - returns The new node performing mean-reduction operation. + :return: The new node performing mean-reduction operation. """ return _get_node_factory_opset4().create( "ReduceL2", as_nodes(node, reduction_axes), {"keep_dims": keep_dims} @@ -382,7 +382,7 @@ def lstm_cell( :param clip: Specifies bound values [-C, C] for tensor clipping performed before activations. :param name: An optional name of the output node. - returns The new node represents LSTMCell. Node outputs count: 2. + :return: The new node represents LSTMCell. Node outputs count: 2. """ if activations is None: activations = ["sigmoid", "tanh", "tanh"] diff --git a/src/bindings/python/src/openvino/runtime/opset5/__init__.py b/src/bindings/python/src/openvino/runtime/opset5/__init__.py index 74fb610a02b..f18d61e927d 100644 --- a/src/bindings/python/src/openvino/runtime/opset5/__init__.py +++ b/src/bindings/python/src/openvino/runtime/opset5/__init__.py @@ -74,7 +74,7 @@ from openvino.runtime.opset5.ops import log_softmax from openvino.runtime.opset5.ops import loop from openvino.runtime.opset1.ops import lrn from openvino.runtime.opset4.ops import lstm_cell -from openvino.runtime.opset1.ops import lstm_sequence +from openvino.runtime.opset5.ops import lstm_sequence from openvino.runtime.opset1.ops import matmul from openvino.runtime.opset1.ops import max_pool from openvino.runtime.opset1.ops import maximum diff --git a/src/bindings/python/src/openvino/runtime/opset5/ops.py b/src/bindings/python/src/openvino/runtime/opset5/ops.py index b024210f4a8..f02200cf3ae 100644 --- a/src/bindings/python/src/openvino/runtime/opset5/ops.py +++ 
b/src/bindings/python/src/openvino/runtime/opset5/ops.py @@ -57,7 +57,7 @@ def batch_norm_inference( :param epsilon: The number to be added to the variance to avoid division by zero when normalizing a value. :param name: The optional name of the output node. - @return: The new node which performs BatchNormInference. + :return: The new node which performs BatchNormInference. """ inputs = as_nodes(data, gamma, beta, mean, variance) return _get_node_factory_opset5().create("BatchNormInference", inputs, {"epsilon": epsilon}) @@ -75,7 +75,7 @@ def gather_nd( :param data: N-D tensor with data for gathering :param indices: K-D tensor of tuples with indices by which data is gathered :param batch_dims: Scalar value of batch dimensions - @return: The new node which performs GatherND + :return: The new node which performs GatherND """ inputs = as_nodes(data, indices) @@ -92,7 +92,7 @@ def log_softmax(data: NodeInput, axis: int, name: Optional[str] = None) -> Node: :param data: The tensor providing input data. :param axis: An axis along which LogSoftmax should be calculated - @return: The new node with LogSoftmax operation applied on each element. + :return: The new node with LogSoftmax operation applied on each element. """ return _get_node_factory_opset5().create("LogSoftmax", [as_node(data)], {"axis": axis}) @@ -123,7 +123,7 @@ def non_max_suppression( :param sort_result_descending: Flag that specifies whenever it is necessary to sort selected boxes across batches or not. :param output_type: Output element type. - @return: The new node which performs NonMaxSuppression + :return: The new node which performs NonMaxSuppression """ if max_output_boxes_per_class is None: max_output_boxes_per_class = make_constant_node(0, np.int64) @@ -158,7 +158,7 @@ def round(data: NodeInput, mode: str = "half_to_even", name: Optional[str] = Non integer or rounding in such a way that the result heads away from zero if `mode` attribute is 'half_away_from_zero`. 
:param name: An optional name of the output node. - @return: The new node with Round operation applied on each element. + :return: The new node with Round operation applied on each element. """ return _get_node_factory_opset5().create("Round", as_nodes(data), {"mode": mode.upper()}) @@ -205,7 +205,7 @@ def lstm_sequence( :param clip: Specifies bound values [-C, C] for tensor clipping performed before activations. :param name: An optional name of the output node. - @return: The new node represents LSTMSequence. Node outputs count: 3. + :return: The new node represents LSTMSequence. Node outputs count: 3. """ if activations is None: activations = ["sigmoid", "tanh", "tanh"] @@ -231,7 +231,7 @@ def hsigmoid(data: NodeInput, name: Optional[str] = None,) -> Node: """Return a node which performs HSigmoid. :param data: Tensor with input data floating point type. - @return: The new node which performs HSigmoid + :return: The new node which performs HSigmoid """ return _get_node_factory_opset5().create("HSigmoid", as_nodes(data), {}) @@ -277,7 +277,7 @@ def gru_sequence( of GRU described in the formula in the ONNX documentation. :param name: An optional name of the output node. - @return: The new node represents GRUSequence. Node outputs count: 2. + :return: The new node represents GRUSequence. Node outputs count: 2. """ if activations is None: activations = ["sigmoid", "tanh"] @@ -337,7 +337,7 @@ def rnn_sequence( :param clip: Specifies bound values [-C, C] for tensor clipping performed before activations. :param name: An optional name of the output node. - @return: The new node represents RNNSequence. Node outputs count: 2. + :return: The new node represents RNNSequence. Node outputs count: 2. 
""" if activations is None: activations = ["tanh"] diff --git a/src/bindings/python/src/openvino/runtime/opset6/__init__.py b/src/bindings/python/src/openvino/runtime/opset6/__init__.py index 3153669ea09..9721ad311b0 100644 --- a/src/bindings/python/src/openvino/runtime/opset6/__init__.py +++ b/src/bindings/python/src/openvino/runtime/opset6/__init__.py @@ -76,7 +76,7 @@ from openvino.runtime.opset5.ops import log_softmax from openvino.runtime.opset5.ops import loop from openvino.runtime.opset1.ops import lrn from openvino.runtime.opset4.ops import lstm_cell -from openvino.runtime.opset1.ops import lstm_sequence +from openvino.runtime.opset5.ops import lstm_sequence from openvino.runtime.opset1.ops import matmul from openvino.runtime.opset1.ops import max_pool from openvino.runtime.opset1.ops import maximum diff --git a/src/bindings/python/src/openvino/runtime/opset6/ops.py b/src/bindings/python/src/openvino/runtime/opset6/ops.py index 08d62ea1b3b..45b295a216e 100644 --- a/src/bindings/python/src/openvino/runtime/opset6/ops.py +++ b/src/bindings/python/src/openvino/runtime/opset6/ops.py @@ -53,7 +53,7 @@ def ctc_greedy_decoder_seq_len( :param sequence_length: Input 1D tensor with sequence length. Shape: [batch_size] :param blank_index: Scalar or 1D tensor with specifies the class index to use for the blank class. Optional parameter. Default value is num_classes-1. - @return: The new node which performs CTCGreedyDecoderSeqLen. + :return: The new node which performs CTCGreedyDecoderSeqLen. 
""" if blank_index is not None: inputs = as_nodes(data, sequence_length, blank_index) @@ -81,7 +81,7 @@ def gather_elements( :param data: N-D tensor with data for gathering :param indices: N-D tensor with indices by which data is gathered :param axis: axis along which elements are gathered - @return: The new node which performs GatherElements + :return: The new node which performs GatherElements """ inputs = as_nodes(data, indices) @@ -110,7 +110,7 @@ def mvn( when normalizing the value. Scalar value. :param eps_mode: how eps is applied (`inside_sqrt` or `outside_sqrt`) :param name: Optional output node name. - returns The new node performing a MVN operation on input tensor. + :return: The new node performing a MVN operation on input tensor. """ inputs = as_nodes(data, axes) @@ -130,7 +130,7 @@ def assign(new_value: NodeInput, variable_id: str, name: Optional[str] = None) - :param new_value: Node producing a value to be assigned to a variable. :param variable_id: Id of a variable to be updated. :param name: Optional name for output node. - returns Assign node + :return: Assign node """ return _get_node_factory_opset6().create( "Assign", @@ -146,7 +146,7 @@ def read_value(init_value: NodeInput, variable_id: str, name: Optional[str] = No :param init_value: Node producing a value to be returned instead of an unassigned variable. :param variable_id: Id of a variable to be read. :param name: Optional name for output node. 
- returns ReadValue node + :return: ReadValue node """ return _get_node_factory_opset6().create( "ReadValue", diff --git a/src/bindings/python/src/openvino/runtime/opset7/__init__.py b/src/bindings/python/src/openvino/runtime/opset7/__init__.py index 5c794e986ef..ec91b1b0f74 100644 --- a/src/bindings/python/src/openvino/runtime/opset7/__init__.py +++ b/src/bindings/python/src/openvino/runtime/opset7/__init__.py @@ -79,7 +79,7 @@ from openvino.runtime.opset5.ops import log_softmax from openvino.runtime.opset5.ops import loop from openvino.runtime.opset1.ops import lrn from openvino.runtime.opset4.ops import lstm_cell -from openvino.runtime.opset1.ops import lstm_sequence +from openvino.runtime.opset5.ops import lstm_sequence from openvino.runtime.opset1.ops import matmul from openvino.runtime.opset1.ops import max_pool from openvino.runtime.opset1.ops import maximum diff --git a/src/bindings/python/src/openvino/runtime/opset7/ops.py b/src/bindings/python/src/openvino/runtime/opset7/ops.py index b07772fb572..f5ee112beab 100644 --- a/src/bindings/python/src/openvino/runtime/opset7/ops.py +++ b/src/bindings/python/src/openvino/runtime/opset7/ops.py @@ -46,7 +46,7 @@ def einsum( :param inputs: The list of input nodes :param equation: Einsum equation - @return: The new node performing Einsum operation on the inputs + :return: The new node performing Einsum operation on the inputs """ attributes = { "equation": equation @@ -66,7 +66,7 @@ def gelu( :param data: The node with data tensor. :param approximation_mode: defines which approximation to use ('tanh' or 'erf') :param name: Optional output node name. - returns The new node performing a Gelu activation with the input tensor. + :return: The new node performing a Gelu activation with the input tensor. """ inputs = as_nodes(data) @@ -88,7 +88,7 @@ def roll( :param data: The node with data tensor. :param shift: The node with the tensor with numbers of places by which elements are shifted. 
:param axes: The node with the tensor with axes along which elements are shifted. - returns The new node performing a Roll operation on the input tensor. + :return: The new node performing a Roll operation on the input tensor. """ inputs = as_nodes(data, shift, axes) @@ -108,7 +108,7 @@ def gather( :param indices: N-D tensor with indices by which data is gathered :param axis: axis along which elements are gathered :param batch_dims: number of batch dimensions - @return: The new node which performs Gather + :return: The new node which performs Gather """ inputs = as_nodes(data, indices, axis) attributes = { @@ -127,7 +127,7 @@ def dft( :param data: Tensor with transformed data. :param axes: Tensor with axes to transform. :param signal_size: Tensor specifying signal size with respect to axes from the input 'axes'. - @return: The new node which performs DFT operation on the input data tensor. + :return: The new node which performs DFT operation on the input data tensor. """ if signal_size is None: inputs = as_nodes(data, axes) @@ -148,7 +148,7 @@ def idft( :param data: Tensor with transformed data. :param axes: Tensor with axes to transform. :param signal_size: Tensor specifying signal size with respect to axes from the input 'axes'. - @return: The new node which performs IDFT operation on the input data tensor. + :return: The new node which performs IDFT operation on the input data tensor. 
""" if signal_size is None: inputs = as_nodes(data, axes) diff --git a/src/bindings/python/src/openvino/runtime/opset8/__init__.py b/src/bindings/python/src/openvino/runtime/opset8/__init__.py index cce21756f76..9fe98e1a76a 100644 --- a/src/bindings/python/src/openvino/runtime/opset8/__init__.py +++ b/src/bindings/python/src/openvino/runtime/opset8/__init__.py @@ -84,7 +84,7 @@ from openvino.runtime.opset5.ops import log_softmax from openvino.runtime.opset5.ops import loop from openvino.runtime.opset1.ops import lrn from openvino.runtime.opset4.ops import lstm_cell -from openvino.runtime.opset1.ops import lstm_sequence +from openvino.runtime.opset5.ops import lstm_sequence from openvino.runtime.opset1.ops import matmul from openvino.runtime.opset8.ops import matrix_nms from openvino.runtime.opset8.ops import max_pool diff --git a/src/bindings/python/src/openvino/runtime/opset8/ops.py b/src/bindings/python/src/openvino/runtime/opset8/ops.py index 0a809f00bf5..0aa381592fb 100644 --- a/src/bindings/python/src/openvino/runtime/opset8/ops.py +++ b/src/bindings/python/src/openvino/runtime/opset8/ops.py @@ -62,7 +62,7 @@ def deformable_convolution( :param bilinear_interpolation_pad: The flag that determines the mode of bilinear interpolation execution. :param name: The optional new name for output node. - returns New node performing deformable convolution operation. + :return: New node performing deformable convolution operation. 
""" if mask is None: inputs = as_nodes(data, offsets, filters) @@ -94,7 +94,7 @@ def adaptive_avg_pool( :param data: The list of input nodes :param output_shape: the shape of spatial dimentions after operation - @return: The new node performing AdaptiveAvgPool operation on the data + :return: The new node performing AdaptiveAvgPool operation on the data """ inputs = as_nodes(data, output_shape) return _get_node_factory_opset8().create("AdaptiveAvgPool", inputs) @@ -111,7 +111,7 @@ def adaptive_max_pool( :param data: The list of input nodes :param output_shape: the shape of spatial dimentions after operation :param index_element_type: Type of indices output. - @return: The new node performing AdaptiveMaxPool operation on the data + :return: The new node performing AdaptiveMaxPool operation on the data """ inputs = as_nodes(data, output_shape) @@ -158,7 +158,7 @@ def multiclass_nms( :param background_class: Specifies the background class id, -1 meaning to keep all classes :param nms_eta: Specifies eta parameter for adpative NMS, in close range [0, 1.0] :param normalized: Specifies whether boxes are normalized or not - @return: The new node which performs MuticlassNms + :return: The new node which performs MuticlassNms """ inputs = as_nodes(boxes, scores) @@ -218,7 +218,7 @@ def matrix_nms( :param post_threshold: Specifies threshold to filter out boxes with low confidence score after decaying :param normalized: Specifies whether boxes are normalized or not - @return: The new node which performs MatrixNms + :return: The new node which performs MatrixNms """ inputs = as_nodes(boxes, scores) @@ -253,7 +253,7 @@ def gather( indicate reverse indexing from the end :param axis: axis along which elements are gathered :param batch_dims: number of batch dimensions - @return: The new node which performs Gather + :return: The new node which performs Gather """ inputs = as_nodes(data, indices, axis) attributes = { @@ -296,7 +296,7 @@ def max_pool( starting at the provided axis. 
Defaults to 0. :param name: The optional name for the created output node. - returns The new node performing max pooling operation. + :return: The new node performing max pooling operation. """ if auto_pad is None: auto_pad = "explicit" @@ -335,7 +335,7 @@ def random_uniform( 'i64', 'i32', 'f64', 'f32', 'f16', 'bf16'. :param global_seed: Specifies global seed value. Required to be a positive integer or 0. :param op_seed: Specifies operational seed value. Required to be a positive integer or 0. - returns The new node which performs generation of random values from uniform distribution. + :return: The new node which performs generation of random values from uniform distribution. """ inputs = as_nodes(output_shape, min_val, max_val) @@ -370,7 +370,7 @@ def slice( :param step: The node providing step values. :param axes: The optional node providing axes to slice, default [0, 1, ..., len(start)-1]. :param name: The optional name for the created output node. - returns The new node performing Slice operation. + :return: The new node performing Slice operation. """ if axes is None: inputs = as_nodes(data, start, stop, step) @@ -392,7 +392,7 @@ def gather_nd( :param data: N-D tensor with data for gathering :param indices: K-D tensor of tuples with indices by which data is gathered :param batch_dims: Scalar value of batch dimensions - @return: The new node which performs GatherND + :return: The new node which performs GatherND """ inputs = as_nodes(data, indices) @@ -413,7 +413,7 @@ def prior_box( :param image_shape: Shape of image to which prior boxes are scaled. :param attrs: The dictionary containing key, value pairs for attributes. :param name: Optional name for the output node. - returns Node representing prior box operation. + :return: Node representing prior box operation. Available attributes are: * min_size The minimum box size (in pixels). 
Range of values: positive floating point numbers @@ -524,7 +524,7 @@ def i420_to_bgr( :param arg_u: The node providing U plane data. Required for separate planes. :param arg_v: The node providing V plane data. Required for separate planes. :param name: The optional name for the created output node. - returns The new node performing I420toBGR operation. + :return: The new node performing I420toBGR operation. """ if arg_u is None and arg_v is None: inputs = as_nodes(arg) @@ -551,7 +551,7 @@ def i420_to_rgb( :param arg_u: The node providing U plane data. Required for separate planes. :param arg_v: The node providing V plane data. Required for separate planes. :param name: The optional name for the created output node. - returns The new node performing I420toRGB operation. + :return: The new node performing I420toRGB operation. """ if arg_u is None and arg_v is None: inputs = as_nodes(arg) @@ -576,7 +576,7 @@ def nv12_to_bgr( :param arg: The node providing single or Y plane data. :param arg_uv: The node providing UV plane data. Required for separate planes. :param name: The optional name for the created output node. - returns The new node performing NV12toBGR operation. + :return: The new node performing NV12toBGR operation. """ if arg_uv is None: inputs = as_nodes(arg) @@ -597,7 +597,7 @@ def nv12_to_rgb( :param arg: The node providing single or Y plane data. :param arg_uv: The node providing UV plane data. Required for separate planes. :param name: The optional name for the created output node. - returns The new node performing NV12toRGB operation. + :return: The new node performing NV12toRGB operation. """ if arg_uv is None: inputs = as_nodes(arg) @@ -626,7 +626,7 @@ def detection_output( :param aux_class_preds: The 2D input tensor with additional class predictions information. :param aux_box_preds: The 2D input tensor with additional box predictions information. :param name: Optional name for the output node. - returns Node representing DetectionOutput operation. 
+ :return: Node representing DetectionOutput operation. Available attributes are: * background_label_id The background label id. Range of values: integer value @@ -751,6 +751,6 @@ def softmax(data: NodeInput, axis: int, name: Optional[str] = None) -> Node: :param data: The tensor providing input data. :param axis: An axis along which Softmax should be calculated. Can be positive or negative. :param name: Optional name for the node. - returns The new node with softmax operation applied on each element. + :return: The new node with softmax operation applied on each element. """ return _get_node_factory_opset8().create("Softmax", [as_node(data)], {"axis": axis}) diff --git a/src/bindings/python/src/openvino/runtime/utils/__init__.py b/src/bindings/python/src/openvino/runtime/utils/__init__.py index 95a08b7ef7b..2f77dab83ca 100644 --- a/src/bindings/python/src/openvino/runtime/utils/__init__.py +++ b/src/bindings/python/src/openvino/runtime/utils/__init__.py @@ -2,3 +2,6 @@ # SPDX-License-Identifier: Apache-2.0 """Generic utilities. 
Factor related functions out to separate files.""" + +from openvino.pyopenvino.util import numpy_to_c +from openvino.pyopenvino.util import get_constant_from_source diff --git a/src/bindings/python/src/openvino/runtime/utils/types.py b/src/bindings/python/src/openvino/runtime/utils/types.py index 94d68cf9d91..450b8bf4192 100644 --- a/src/bindings/python/src/openvino/runtime/utils/types.py +++ b/src/bindings/python/src/openvino/runtime/utils/types.py @@ -8,9 +8,8 @@ from typing import List, Union import numpy as np -from openvino.runtime.exceptions import NgraphTypeError -from openvino.runtime import Node, Shape, Output -from openvino.runtime import Type as NgraphType +from openvino.runtime.exceptions import OVTypeError +from openvino.runtime import Node, Shape, Output, Type from openvino.runtime.op import Constant log = logging.getLogger(__name__) @@ -22,19 +21,19 @@ ScalarData = Union[int, float] NodeInput = Union[Node, NumericData] openvino_to_numpy_types_map = [ - (NgraphType.boolean, np.bool), - (NgraphType.f16, np.float16), - (NgraphType.f32, np.float32), - (NgraphType.f64, np.float64), - (NgraphType.i8, np.int8), - (NgraphType.i16, np.int16), - (NgraphType.i32, np.int32), - (NgraphType.i64, np.int64), - (NgraphType.u8, np.uint8), - (NgraphType.u16, np.uint16), - (NgraphType.u32, np.uint32), - (NgraphType.u64, np.uint64), - (NgraphType.bf16, np.uint16), + (Type.boolean, np.bool), + (Type.f16, np.float16), + (Type.f32, np.float32), + (Type.f64, np.float64), + (Type.i8, np.int8), + (Type.i16, np.int16), + (Type.i32, np.int32), + (Type.i64, np.int64), + (Type.u8, np.uint8), + (Type.u16, np.uint16), + (Type.u32, np.uint32), + (Type.u64, np.uint64), + (Type.bf16, np.uint16), ] openvino_to_numpy_types_str_map = [ @@ -53,23 +52,23 @@ openvino_to_numpy_types_str_map = [ ] -def get_element_type(data_type: NumericType) -> NgraphType: +def get_element_type(data_type: NumericType) -> Type: """Return an ngraph element type for a Python type or numpy.dtype.""" if data_type 
is int: log.warning("Converting int type of undefined bitwidth to 32-bit ngraph integer.") - return NgraphType.i32 + return Type.i32 if data_type is float: log.warning("Converting float type of undefined bitwidth to 32-bit ngraph float.") - return NgraphType.f32 + return Type.f32 - ng_type = next( - (ng_type for (ng_type, np_type) in openvino_to_numpy_types_map if np_type == data_type), None + ov_type = next( + (ov_type for (ov_type, np_type) in openvino_to_numpy_types_map if np_type == data_type), None ) - if ng_type: - return ng_type + if ov_type: + return ov_type - raise NgraphTypeError("Unidentified data type %s", data_type) + raise OVTypeError("Unidentified data type %s", data_type) def get_element_type_str(data_type: NumericType) -> str: @@ -82,27 +81,27 @@ def get_element_type_str(data_type: NumericType) -> str: log.warning("Converting float type of undefined bitwidth to 32-bit ngraph float.") return "f32" - ng_type = next( - (ng_type for (ng_type, np_type) in openvino_to_numpy_types_str_map if np_type == data_type), + ov_type = next( + (ov_type for (ov_type, np_type) in openvino_to_numpy_types_str_map if np_type == data_type), None, ) - if ng_type: - return ng_type + if ov_type: + return ov_type - raise NgraphTypeError("Unidentified data type %s", data_type) + raise OVTypeError("Unidentified data type %s", data_type) -def get_dtype(ngraph_type: NgraphType) -> np.dtype: - """Return a numpy.dtype for an ngraph element type.""" +def get_dtype(openvino_type: Type) -> np.dtype: + """Return a numpy.dtype for an openvino element type.""" np_type = next( - (np_type for (ng_type, np_type) in openvino_to_numpy_types_map if ng_type == ngraph_type), + (np_type for (ov_type, np_type) in openvino_to_numpy_types_map if ov_type == openvino_type), None, ) if np_type: return np.dtype(np_type) - raise NgraphTypeError("Unidentified data type %s", ngraph_type) + raise OVTypeError("Unidentified data type %s", openvino_type) def get_ndarray(data: NumericData) -> np.ndarray: @@ 
-121,11 +120,11 @@ def get_shape(data: NumericData) -> TensorShape: return [] -def make_constant_node(value: NumericData, dtype: NumericType = None) -> Constant: - """Return an ngraph Constant node with the specified value.""" +def make_constant_node(value: NumericData, dtype: Union[NumericType, Type] = None) -> Constant: + """Return an openvino Constant node with the specified value.""" ndarray = get_ndarray(value) - if dtype: - element_type = get_element_type(dtype) + if dtype is not None: + element_type = get_element_type(dtype) if isinstance(dtype, (type, np.dtype)) else dtype else: element_type = get_element_type(ndarray.dtype) diff --git a/src/bindings/python/src/pyopenvino/CMakeLists.txt b/src/bindings/python/src/pyopenvino/CMakeLists.txt index 5ae33ca1727..e8ced27a9fe 100644 --- a/src/bindings/python/src/pyopenvino/CMakeLists.txt +++ b/src/bindings/python/src/pyopenvino/CMakeLists.txt @@ -78,6 +78,8 @@ endif() target_include_directories(${PROJECT_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/..") target_link_libraries(${PROJECT_NAME} PRIVATE openvino::runtime ${OFFLINE_TRANSFORMATIONS_LIB}) +addVersionDefines(pyopenvino.cpp CI_BUILD_NUMBER) + # perform copy if(OpenVINO_SOURCE_DIR) add_custom_command(TARGET ${PROJECT_NAME} diff --git a/src/bindings/python/src/pyopenvino/core/async_infer_queue.cpp b/src/bindings/python/src/pyopenvino/core/async_infer_queue.cpp index 6d69aa81c6e..c6438228ea1 100644 --- a/src/bindings/python/src/pyopenvino/core/async_infer_queue.cpp +++ b/src/bindings/python/src/pyopenvino/core/async_infer_queue.cpp @@ -202,7 +202,7 @@ void regclass_AsyncInferQueue(py::module m) { py::arg("inputs"), py::arg("userdata"), R"( - Run asynchronous inference using next available InferRequest. + Run asynchronous inference using the next available InferRequest. This function releases the GIL, so another Python thread can work while this function runs in the background. 
@@ -262,8 +262,8 @@ void regclass_AsyncInferQueue(py::module m) { }, R"( Sets unified callback on all InferRequests from queue's pool. - Signature of such function should have two arguments, where - first one is InferRequest object and second one is userdata + The signature of such function should have two arguments, where + the first one is InferRequest object and the second one is userdata connected to InferRequest from the AsyncInferQueue's pool. .. code-block:: python diff --git a/src/bindings/python/src/pyopenvino/core/common.cpp b/src/bindings/python/src/pyopenvino/core/common.cpp index 33855935143..3ff7a32d465 100644 --- a/src/bindings/python/src/pyopenvino/core/common.cpp +++ b/src/bindings/python/src/pyopenvino/core/common.cpp @@ -6,6 +6,8 @@ #include +#include "openvino/util/common_util.hpp" + #define C_CONTIGUOUS py::detail::npy_api::constants::NPY_ARRAY_C_CONTIGUOUS_ namespace Common { @@ -88,6 +90,108 @@ ov::Tensor tensor_from_numpy(py::array& array, bool shared_memory) { return tensor; } +ov::PartialShape partial_shape_from_list(const py::list& shape) { + using value_type = ov::Dimension::value_type; + ov::PartialShape pshape; + for (py::handle dim : shape) { + if (py::isinstance(dim)) { + pshape.insert(pshape.end(), ov::Dimension(dim.cast())); + } else if (py::isinstance(dim)) { + pshape.insert(pshape.end(), Common::dimension_from_str(dim.cast())); + } else if (py::isinstance(dim)) { + pshape.insert(pshape.end(), dim.cast()); + } else if (py::isinstance(dim) || py::isinstance(dim)) { + py::list bounded_dim = dim.cast(); + if (bounded_dim.size() != 2) { + throw py::type_error("Two elements are expected in tuple(lower, upper) for dynamic dimension, but " + + std::to_string(bounded_dim.size()) + " elements were given."); + } + if (!(py::isinstance(bounded_dim[0]) && py::isinstance(bounded_dim[1]))) { + throw py::type_error("Incorrect pair of types (" + std::string(bounded_dim[0].get_type().str()) + ", " + + std::string(bounded_dim[1].get_type().str()) + 
+ ") for dynamic dimension, ints are expected."); + } + pshape.insert(pshape.end(), + ov::Dimension(bounded_dim[0].cast(), bounded_dim[1].cast())); + } else { + throw py::type_error("Incorrect type " + std::string(dim.get_type().str()) + + " for dimension. Expected types are: " + "int, str, openvino.runtime.Dimension, list/tuple with lower and upper values for " + "dynamic dimension."); + } + } + return pshape; +} + +bool check_all_digits(const std::string& value) { + auto val = ov::util::trim(value); + for (const auto& c : val) { + if (!std::isdigit(c) || c == '-') { + return false; + } + } + return true; +} + +template +T stringToType(const std::string& valStr) { + T ret{0}; + std::istringstream ss(valStr); + if (!ss.eof()) { + ss >> ret; + } + return ret; +} + +ov::Dimension dimension_from_str(const std::string& value) { + using value_type = ov::Dimension::value_type; + auto val = ov::util::trim(value); + if (val == "?" || val == "-1") { + return {-1}; + } + if (val.find("..") == std::string::npos) { + OPENVINO_ASSERT(Common::check_all_digits(val), "Cannot parse dimension: \"", val, "\""); + return {Common::stringToType(val)}; + } + + std::string min_value_str = val.substr(0, val.find("..")); + OPENVINO_ASSERT(Common::check_all_digits(min_value_str), "Cannot parse min bound: \"", min_value_str, "\""); + + value_type min_value; + if (min_value_str.empty()) { + min_value = 0; + } else { + min_value = Common::stringToType(min_value_str); + } + + std::string max_value_str = val.substr(val.find("..") + 2); + value_type max_value; + if (max_value_str.empty()) { + max_value = -1; + } else { + max_value = Common::stringToType(max_value_str); + } + + OPENVINO_ASSERT(Common::check_all_digits(max_value_str), "Cannot parse max bound: \"", max_value_str, "\""); + + return {min_value, max_value}; +} + +ov::PartialShape partial_shape_from_str(const std::string& value) { + auto val = ov::util::trim(value); + if (val == "...") { + return ov::PartialShape::dynamic(); + } + 
ov::PartialShape res; + std::stringstream ss(val); + std::string field; + while (getline(ss, field, ',')) { + OPENVINO_ASSERT(!field.empty(), "Cannot get vector of dimensions! \"", val, "\" is incorrect"); + res.insert(res.end(), Common::dimension_from_str(field)); + } + return res; +} + py::array as_contiguous(py::array& array, ov::element::Type type) { switch (type) { // floating diff --git a/src/bindings/python/src/pyopenvino/core/common.hpp b/src/bindings/python/src/pyopenvino/core/common.hpp index 12108096116..8a3199643b2 100644 --- a/src/bindings/python/src/pyopenvino/core/common.hpp +++ b/src/bindings/python/src/pyopenvino/core/common.hpp @@ -33,6 +33,12 @@ ov::Tensor tensor_from_pointer(py::array& array, const ov::Shape& shape); ov::Tensor tensor_from_numpy(py::array& array, bool shared_memory); +ov::PartialShape partial_shape_from_list(const py::list& shape); + +ov::PartialShape partial_shape_from_str(const std::string& value); + +ov::Dimension dimension_from_str(const std::string& value); + py::array as_contiguous(py::array& array, ov::element::Type type); const ov::Tensor& cast_to_tensor(const py::handle& tensor); diff --git a/src/bindings/python/src/pyopenvino/core/compiled_model.cpp b/src/bindings/python/src/pyopenvino/core/compiled_model.cpp index 3203000b0e2..768768a1872 100644 --- a/src/bindings/python/src/pyopenvino/core/compiled_model.cpp +++ b/src/bindings/python/src/pyopenvino/core/compiled_model.cpp @@ -53,11 +53,11 @@ void regclass_CompiledModel(py::module m) { py::arg("inputs"), R"( Infers specified input(s) in synchronous mode. - Blocks all methods of CompiledModel while request is running. + Blocks all methods of CompiledModel while the request is running. Method creates new temporary InferRequest and run inference on it. - It is advised to use dedicated InferRequest class for performance, - optimizing workflows and creating advanced pipelines. 
+ It is advised to use a dedicated InferRequest class for performance, + optimizing workflows, and creating advanced pipelines. :param inputs: Data to set on input tensors. :type inputs: Dict[Union[int, str, openvino.runtime.ConstOutput], openvino.runtime.Tensor] @@ -108,10 +108,10 @@ void regclass_CompiledModel(py::module m) { R"( Exports the compiled model to bytes/output stream. - Advanced version of `export_model`. It utilizes, streams from standard + Advanced version of `export_model`. It utilizes, streams from the standard Python library `io`. - Function performs flushing of the stream, writes to it and then rewinds + Function performs flushing of the stream, writes to it, and then rewinds the stream to the beginning (using seek(0)). :param model_stream: A stream object to which the model will be serialized. @@ -149,15 +149,6 @@ void regclass_CompiledModel(py::module m) { :rtype: None )"); - // todo: remove after Accuracy Checker migration to set/get_property API - cls.def( - "get_config", - [](ov::CompiledModel& self, const std::string& name) -> py::object { - PyErr_WarnEx(PyExc_DeprecationWarning, "get_config() is deprecated, use get_property() instead.", 1); - return Common::from_ov_any(self.get_property(name)).as(); - }, - py::arg("name")); - cls.def( "get_property", [](ov::CompiledModel& self, const std::string& name) -> py::object { @@ -172,26 +163,17 @@ void regclass_CompiledModel(py::module m) { :rtype: Any )"); - // todo: remove after Accuracy Checker migration to set/get_property API - cls.def( - "get_metric", - [](ov::CompiledModel& self, const std::string& name) -> py::object { - PyErr_WarnEx(PyExc_DeprecationWarning, "get_metric() is deprecated, use get_property() instead.", 1); - return Common::from_ov_any(self.get_property(name)).as(); - }, - py::arg("name")); - cls.def("get_runtime_model", &ov::CompiledModel::get_runtime_model, R"( Gets runtime model information from a device. 
- This object (returned model) represents the internal device specific model - which is optimized for particular accelerator. It contains device specific nodes, - runtime information and can be used only to understand how the source model - is optimized and which kernels, element types and layouts are selected. + This object (returned model) represents the internal device-specific model + which is optimized for the particular accelerator. It contains device-specific nodes, + runtime information, and can be used only to understand how the source model + is optimized and which kernels, element types, and layouts are selected. - :return: Model containing Executable Graph information. + :return: Model, containing Executable Graph information. :rtype: openvino.runtime.Model )"); @@ -219,7 +201,7 @@ void regclass_CompiledModel(py::module m) { py::arg("index"), R"( Gets input of a compiled model identified by an index. - If an input with given index is not found, this method throws an exception. + If the input with given index is not found, this method throws an exception. :param index: An input index. :type index: int @@ -232,9 +214,9 @@ void regclass_CompiledModel(py::module m) { py::arg("tensor_name"), R"( Gets input of a compiled model identified by a tensor_name. - If an input with given tensor name is not found, this method throws an exception. + If the input with given tensor name is not found, this method throws an exception. - :param tensor_name: An input tensor's name. + :param tensor_name: An input tensor name. :type tensor_name: str :return: A compiled model input. :rtype: openvino.runtime.ConstOutput @@ -253,7 +235,7 @@ void regclass_CompiledModel(py::module m) { (ov::Output(ov::CompiledModel::*)() const) & ov::CompiledModel::output, R"( Gets a single output of a compiled model. - If a model has more than one output, this method throws an exception. + If the model has more than one output, this method throws an exception. :return: A compiled model output. 
:rtype: openvino.runtime.ConstOutput @@ -264,7 +246,7 @@ void regclass_CompiledModel(py::module m) { py::arg("index"), R"( Gets output of a compiled model identified by an index. - If an output with given index is not found, this method throws an exception. + If the output with given index is not found, this method throws an exception. :param index: An output index. :type index: int @@ -277,9 +259,9 @@ void regclass_CompiledModel(py::module m) { py::arg("tensor_name"), R"( Gets output of a compiled model identified by a tensor_name. - If an output with given tensor name is not found, this method throws an exception. + If the output with given tensor name is not found, this method throws an exception. - :param tensor_name: An output tensor's name. + :param tensor_name: An output tensor name. :type tensor_name: str :return: A compiled model output. :rtype: openvino.runtime.ConstOutput diff --git a/src/bindings/python/src/pyopenvino/core/core.cpp b/src/bindings/python/src/pyopenvino/core/core.cpp index 722967f91cc..7163bbe129b 100644 --- a/src/bindings/python/src/pyopenvino/core/core.cpp +++ b/src/bindings/python/src/pyopenvino/core/core.cpp @@ -27,24 +27,11 @@ void regclass_Core(py::module m) { py::class_> cls(m, "Core"); cls.doc() = "openvino.runtime.Core class represents OpenVINO runtime Core entity. User applications can create several " - "Core class instances, but in this case the underlying plugins are created multiple times and not shared " + "Core class instances, but in this case, the underlying plugins are created multiple times and not shared " "between several Core instances. 
The recommended way is to have a single Core instance per application."; cls.def(py::init(), py::arg("xml_config_file") = ""); - // todo: remove after Accuracy Checker migration to set/get_property API - cls.def( - "set_config", - [](ov::Core& self, const std::map& config, const std::string& device_name) { - PyErr_WarnEx(PyExc_DeprecationWarning, "set_config() is deprecated, use set_property() instead.", 1); - self.set_property(device_name, {config.begin(), config.end()}); - }, - py::arg("device_name") = "", - py::arg("properties"), - R"( - Sets properties for the device. - )"); - cls.def( "set_property", [](ov::Core& self, const std::map& properties) { @@ -95,12 +82,12 @@ void regclass_Core(py::module m) { py::arg("config") = py::dict(), R"( Creates a compiled model from a source model object. - Users can create as many compiled models as they need and use them simultaneously + Users can create as many compiled models as they need, and use them simultaneously (up to the limitation of the hardware resources). :param model: Model acquired from read_model function. :type model: openvino.runtime.Model - :param device_name: Name of the device to load the model to. + :param device_name: Name of the device which will load the model. :type device_name: str :param properties: Optional dict of pairs: (property name, property value) relevant only for this load operation. :type properties: dict @@ -119,7 +106,7 @@ void regclass_Core(py::module m) { py::arg("config") = py::dict(), R"( Creates and loads a compiled model from a source model to the default OpenVINO device - selected by AUTO plugin. Users can create as many compiled models as they need and use + selected by AUTO plugin. Users can create as many compiled models as they need, and use them simultaneously (up to the limitation of the hardware resources). :param model: Model acquired from read_model function. 
@@ -229,8 +216,8 @@ void regclass_Core(py::module m) { :param model: A path to a model in IR / ONNX / PDPD format. :type model: str :param weights: A path to a data file For IR format (*.bin): if path is empty, - will try to read bin file with the same name as xml and if bin - file with the same name was not found, will load IR without weights. + it tries to read a bin file with the same name as xml and if the bin + file with the same name was not found, loads IR without weights. For ONNX format (*.onnx): weights parameter is not used. For PDPD format (*.pdmodel) weights parameter is not used. :type weights: str @@ -268,9 +255,8 @@ void regclass_Core(py::module m) { :param model: A string with model in IR / ONNX / PDPD format. :type model: str :param weights: A path to a data file For IR format (*.bin): if path is empty, - will try to read bin file with the same name as xml and if bin - file with the same name was not found, will load IR without weights. - For ONNX format (*.onnx): weights parameter is not used. + it tries to read a bin file with the same name as xml and if the bin + file with the same name was not found, loads IR without weights. For ONNX format (*.onnx): weights parameter is not used. For PDPD format (*.pdmodel) weights parameter is not used. :type weights: str :return: A model. @@ -293,10 +279,10 @@ void regclass_Core(py::module m) { R"( Imports a compiled model from a previously exported one. - :param model_stream: Input stream containing a model previously exported using export_model method. + :param model_stream: Input stream, containing a model previously exported, using export_model method. :type model_stream: bytes - :param device_name: Name of device to import compiled model for. - Note, if device_name device was not used to compile the original mode, an exception is thrown. + :param device_name: Name of device to which compiled model is imported. + Note: if device_name is not used to compile the original model, an exception is thrown. 
:type device_name: str :param properties: Optional map of pairs: (property name, property value) relevant only for this load operation. :type properties: dict, optional @@ -345,10 +331,10 @@ void regclass_Core(py::module m) { Python library `io`. - :param model_stream: Input stream containing a model previously exported using export_model method. + :param model_stream: Input stream, containing a model previously exported, using export_model method. :type model_stream: io.BytesIO - :param device_name: Name of device to import compiled model for. - Note, if device_name device was not used to compile the original mode, an exception is thrown. + :param device_name: Name of device to which compiled model is imported. + Note: if device_name is not used to compile the original model, an exception is thrown. :type device_name: str :param properties: Optional map of pairs: (property name, property value) relevant only for this load operation. :type properties: dict, optional @@ -369,16 +355,6 @@ void regclass_Core(py::module m) { new_compiled = core.import_model(user_stream, "CPU") )"); - // todo: remove after Accuracy Checker migration to set/get_property API - cls.def( - "get_config", - [](ov::Core& self, const std::string& device_name, const std::string& name) -> py::object { - PyErr_WarnEx(PyExc_DeprecationWarning, "get_config() is deprecated, use get_property() instead.", 1); - return Common::from_ov_any(self.get_property(device_name, name)).as(); - }, - py::arg("device_name"), - py::arg("name")); - cls.def( "get_property", [](ov::Core& self, const std::string& device_name, const std::string& name) -> py::object { @@ -397,16 +373,6 @@ void regclass_Core(py::module m) { :rtype: object )"); - // todo: remove after Accuracy Checker migration to set/get_property API - cls.def( - "get_metric", - [](ov::Core& self, const std::string device_name, const std::string name) -> py::object { - PyErr_WarnEx(PyExc_DeprecationWarning, "get_metric() is deprecated, use get_property() 
instead.", 1); - return Common::from_ov_any(self.get_property(device_name, name)).as(); - }, - py::arg("device_name"), - py::arg("name")); - cls.def("register_plugin", &ov::Core::register_plugin, py::arg("plugin_name"), diff --git a/src/bindings/python/src/pyopenvino/core/infer_request.cpp b/src/bindings/python/src/pyopenvino/core/infer_request.cpp index c9aac19a9e1..8d70c7bd23c 100644 --- a/src/bindings/python/src/pyopenvino/core/infer_request.cpp +++ b/src/bindings/python/src/pyopenvino/core/infer_request.cpp @@ -51,8 +51,8 @@ void regclass_InferRequest(py::module m) { py::arg("tensors"), R"( Sets batch of tensors for input data to infer by tensor name. - Model input shall have batch dimension and number of tensors shall - match with batch size. Current version supports set tensors to model inputs only. + Model input needs to have batch dimension and the number of tensors needs to be + matched with batch size. Current version supports set tensors to model inputs only. In case if `tensor_name` is associated with output (or any other non-input node), an exception will be thrown. @@ -60,7 +60,7 @@ void regclass_InferRequest(py::module m) { :type tensor_name: str :param tensors: Input tensors for batched infer request. The type of each tensor must match the model input element type and shape (except batch dimension). - Total size of tensors shall match with input's size. + Total size of tensors needs to match with input's size. :type tensors: List[openvino.runtime.Tensor] )"); @@ -73,8 +73,8 @@ void regclass_InferRequest(py::module m) { py::arg("tensors"), R"( Sets batch of tensors for input data to infer by tensor name. - Model input shall have batch dimension and number of tensors shall - match with batch size. Current version supports set tensors to model inputs only. + Model input needs to have batch dimension and the number of tensors needs to be + matched with batch size. Current version supports set tensors to model inputs only. 
In case if `port` is associated with output (or any other non-input node), an exception will be thrown. @@ -83,7 +83,7 @@ void regclass_InferRequest(py::module m) { :type port: openvino.runtime.ConstOutput :param tensors: Input tensors for batched infer request. The type of each tensor must match the model input element type and shape (except batch dimension). - Total size of tensors shall match with input's size. + Total size of tensors needs to match with input's size. :type tensors: List[openvino.runtime.Tensor] :rtype: None )"); @@ -130,12 +130,12 @@ void regclass_InferRequest(py::module m) { py::arg("tensors"), R"( Sets batch of tensors for single input data. - Model input shall have batch dimension and number of `tensors` - shall match with batch size. + Model input needs to have batch dimension and the number of `tensors` + needs to match with batch size. :param tensors: Input tensors for batched infer request. The type of each tensor must match the model input element type and shape (except batch dimension). - Total size of tensors shall match with input's size. + Total size of tensors needs to match with input's size. :type tensors: List[openvino.runtime.Tensor] )"); @@ -148,14 +148,14 @@ void regclass_InferRequest(py::module m) { py::arg("tensors"), R"( Sets batch of tensors for single input data to infer by index. - Model input shall have batch dimension and number of `tensors` - shall match with batch size. + Model input needs to have batch dimension and the number of `tensors` + needs to match with batch size. :param idx: Index of input tensor. :type idx: int :param tensors: Input tensors for batched infer request. The type of each tensor must match the model input element type and shape (except batch dimension). - Total size of tensors shall match with input's size. + Total size of tensors needs to match with input's size. 
)"); cls.def( @@ -513,8 +513,8 @@ void regclass_InferRequest(py::module m) { return self._request.get_profiling_info(); }, R"( - Queries performance measures per layer to get feedback of what - is the most time consuming operation, not all plugins provide + Queries performance is measured per layer to get feedback on what + is the most time-consuming operation, not all plugins provide meaningful data. :return: List of profiling information for operations in model. @@ -616,7 +616,7 @@ void regclass_InferRequest(py::module m) { return self._request.get_profiling_info(); }, R"( - Performance measures per layer to get feedback of what is the most time consuming operation. + Performance is measured per layer to get feedback on the most time-consuming operation. Not all plugins provide meaningful data! :return: Inference time. diff --git a/src/bindings/python/src/pyopenvino/core/offline_transformations.cpp b/src/bindings/python/src/pyopenvino/core/offline_transformations.cpp index 319f1ad2705..3495033cf26 100644 --- a/src/bindings/python/src/pyopenvino/core/offline_transformations.cpp +++ b/src/bindings/python/src/pyopenvino/core/offline_transformations.cpp @@ -146,7 +146,7 @@ void regmodule_offline_transformations(py::module m) { py::arg("weights_path"), py::arg("version") = "UNSPECIFIED", R"( - Serialize given model into IR. The generated .xml and .bin files will be save + Serialize given model into IR. The generated .xml and .bin files will be saved into provided paths. :param model: model which will be converted to IR representation diff --git a/src/bindings/python/src/pyopenvino/core/tensor.cpp b/src/bindings/python/src/pyopenvino/core/tensor.cpp index bedf9b2186d..7d3d77098f4 100644 --- a/src/bindings/python/src/pyopenvino/core/tensor.cpp +++ b/src/bindings/python/src/pyopenvino/core/tensor.cpp @@ -26,10 +26,10 @@ void regclass_Tensor(py::module m) { :param array: Array to create tensor from. 
:type array: numpy.array - :param shared_memory: If `True` this Tensor memory is being shared with a host, + :param shared_memory: If `True`, this Tensor memory is being shared with a host, that means the responsibility of keeping host memory is on the side of a user. Any action performed on the host - memory will be reflected on this Tensor's memory! + memory is reflected on this Tensor's memory! If `False`, data is being copied to this Tensor. Requires data to be C_CONTIGUOUS if `True`. :type shared_memory: bool @@ -43,8 +43,8 @@ void regclass_Tensor(py::module m) { R"( Another Tensor's special constructor. - It take an array or slice of it and shape that will be - selected starting from the first element of given array/slice. + It takes an array or slice of it, and shape that will be + selected, starting from the first element of the given array/slice. Please use it only in advanced cases if necessary! :param array: Underlaying methods will retrieve pointer on first element diff --git a/src/bindings/python/src/pyopenvino/core/variable_state.cpp b/src/bindings/python/src/pyopenvino/core/variable_state.cpp index 4550cfbb4eb..4b063e0fca8 100644 --- a/src/bindings/python/src/pyopenvino/core/variable_state.cpp +++ b/src/bindings/python/src/pyopenvino/core/variable_state.cpp @@ -19,14 +19,6 @@ void regclass_VariableState(py::module m) { R"( Reset internal variable state for relevant infer request, to a value specified as default for according node. - - Parameters - ---------- - None - - Returns - ---------- - reset : None )"); variable_st.def_property_readonly("name", @@ -34,10 +26,8 @@ void regclass_VariableState(py::module m) { R"( Gets name of current variable state. - Returns - ---------- - reset : str - A string representing a state name. + :return: A string representing a state name. + :rtype: str )"); variable_st.def_property("state", @@ -45,15 +35,5 @@ void regclass_VariableState(py::module m) { &ov::VariableState::set_state, R"( Gets/sets variable state. 
- - Parameters - ---------- - state : openvino.runtime.Tensor - The current state to set. - - Returns - ---------- - state : openvino.runtime.Tensor - A tensor representing a state. )"); } diff --git a/src/bindings/python/src/pyopenvino/frontend/frontend.cpp b/src/bindings/python/src/pyopenvino/frontend/frontend.cpp index d4f8fde9609..48829a01794 100644 --- a/src/bindings/python/src/pyopenvino/frontend/frontend.cpp +++ b/src/bindings/python/src/pyopenvino/frontend/frontend.cpp @@ -29,15 +29,10 @@ void regclass_frontend_FrontEnd(py::module m) { R"( Loads an input model by specified model file path. - Parameters - ---------- - path : str - Main model file path. - - Returns - ---------- - load : InputModel - Loaded input model. + :param path: Main model file path. + :type path: str + :return: Loaded input model. + :rtype: openvino.frontend.InputModel )"); fem.def("convert", @@ -46,32 +41,22 @@ void regclass_frontend_FrontEnd(py::module m) { R"( Completely convert and normalize entire function, throws if it is not possible. - Parameters - ---------- - model : InputModel - Input model. - - Returns - ---------- - convert : Model - Fully converted nGraph function. + :param model: Input model. + :type model: openvino.frontend.InputModel + :return: Fully converted OpenVINO Model. + :rtype: openvino.runtime.Model )"); fem.def("convert", static_cast&) const>(&FrontEnd::convert), - py::arg("function"), + py::arg("model"), R"( Completely convert the remaining, not converted part of a function. - Parameters - ---------- - function : Model - Partially converted nGraph function. - - Returns - ---------- - convert : Model - Fully converted nGraph function. + :param model: Partially converted OpenVINO model. + :type model: openvino.frontend.Model + :return: Fully converted OpenVINO Model. 
+ :rtype: openvino.runtime.Model )"); fem.def("convert_partially", @@ -82,15 +67,10 @@ void regclass_frontend_FrontEnd(py::module m) { Converted parts are not normalized by additional transformations; normalize function or another form of convert function should be called to finalize the conversion process. - Parameters - ---------- - model : InputModel - Input model. - - Returns - ---------- - convert_partially : Model - Partially converted nGraph function. + :param model : Input model. + :type model: openvino.frontend.InputModel + :return: Partially converted OpenVINO Model. + :rtype: openvino.runtime.Model )"); fem.def("decode", @@ -101,27 +81,20 @@ void regclass_frontend_FrontEnd(py::module m) { Each decoding node is an nGraph node representing a single FW operation node with all attributes represented in FW-independent way. - Parameters - ---------- - model : InputModel - Input model. - - Returns - ---------- - decode : Model - nGraph function after decoding. + :param model : Input model. + :type model: openvino.frontend.InputModel + :return: OpenVINO Model after decoding. + :rtype: openvino.runtime.Model )"); fem.def("normalize", &FrontEnd::normalize, - py::arg("function"), + py::arg("model"), R"( Runs normalization passes on function that was loaded with partial conversion. - Parameters - ---------- - function : Model - Partially converted nGraph function. + :param model : Partially converted OpenVINO model. + :type model: openvino.runtime.Model )"); fem.def("get_name", @@ -130,10 +103,8 @@ void regclass_frontend_FrontEnd(py::module m) { Gets name of this FrontEnd. Can be used by clients if frontend is selected automatically by FrontEndManager::load_by_model. - Parameters - ---------- - get_name : str - Current frontend name. Empty string if not implemented. + :return: Current frontend name. Returns empty string if not implemented. 
+ :rtype: str )"); fem.def("add_extension", diff --git a/src/bindings/python/src/pyopenvino/frontend/input_model.cpp b/src/bindings/python/src/pyopenvino/frontend/input_model.cpp index 05312dfbcd0..9bef06ea133 100644 --- a/src/bindings/python/src/pyopenvino/frontend/input_model.cpp +++ b/src/bindings/python/src/pyopenvino/frontend/input_model.cpp @@ -25,15 +25,10 @@ void regclass_frontend_InputModel(py::module m) { Returns a tensor place by a tensor name following framework conventions, or nullptr if a tensor with this name doesn't exist. - Parameters - ---------- - tensor_name : str - Name of tensor. - - Returns - ---------- - get_place_by_tensor_name : Place - Tensor place corresponding to specified tensor name. + :param tensor_name: Name of tensor. + :type tensor_name: str + :return: Tensor place corresponding to specified tensor name. + :rtype: openvino.frontend.Place )"); im.def("get_place_by_operation_name", @@ -43,15 +38,10 @@ void regclass_frontend_InputModel(py::module m) { Returns an operation place by an operation name following framework conventions, or nullptr if an operation with this name doesn't exist. - Parameters - ---------- - operation_name : str - Name of operation. - - Returns - ---------- - get_place_by_operation_name : Place - Place representing operation. + :param operation_name: Name of operation. + :type operation_name: str + :return: Place representing operation. + :rtype: openvino.frontend.Place )"); im.def("get_place_by_operation_name_and_input_port", @@ -61,18 +51,12 @@ void regclass_frontend_InputModel(py::module m) { R"( Returns an input port place by operation name and appropriate port index. - Parameters - ---------- - operation_name : str - Name of operation. - - input_port_index : int - Index of input port for this operation. - - Returns - ---------- - get_place_by_operation_name_and_input_port : Place - Place representing input port of operation. + :param operation_name: Name of operation. 
+ :type operation_name: str + :param input_port_index: Index of input port for this operation. + :type input_port_index: int + :return: Place representing input port of operation. + :rtype: openvino.frontend.Place )"); im.def("get_place_by_operation_name_and_output_port", @@ -82,18 +66,12 @@ void regclass_frontend_InputModel(py::module m) { R"( Returns an output port place by operation name and appropriate port index. - Parameters - ---------- - operation_name : str - Name of operation. - - output_port_index : int - Index of output port for this operation. - - Returns - ---------- - get_place_by_operation_name_and_output_port : Place - Place representing output port of operation. + :param operation_name: Name of operation. + :type operation_name: str + :param output_port_index: Index of output port for this operation. + :type output_port_index: int + :return: Place representing output port of operation. + :rtype: openvino.frontend.Place )"); im.def("set_name_for_tensor", @@ -103,13 +81,10 @@ void regclass_frontend_InputModel(py::module m) { R"( Sets name for tensor. Overwrites existing names of this place. - Parameters - ---------- - tensor : Place - Tensor place. - - new_name : str - New name for this tensor. + :param tensor: Tensor place. + :type tensor: openvino.frontend.Place + :param new_name: New name for this tensor. + :type new_name: str )"); im.def("add_name_for_tensor", @@ -119,13 +94,10 @@ void regclass_frontend_InputModel(py::module m) { R"( Adds new name for tensor - Parameters - ---------- - tensor : Place - Tensor place. - - new_name : str - New name to be added to this place. + :param tensor: Tensor place. + :type tensor: openvino.frontend.Place + :param new_name: New name to be added to this place. + :type new_name: str )"); im.def("set_name_for_operation", @@ -135,13 +107,10 @@ void regclass_frontend_InputModel(py::module m) { R"( Adds new name for tensor. - Parameters - ---------- - operation : Place - Operation place. 
- - new_name : str - New name for this operation. + :param operation: Operation place. + :type operation: openvino.frontend.Place + :param new_name: New name for this operation. + :type new_name: str )"); im.def("free_name_for_tensor", @@ -150,10 +119,8 @@ void regclass_frontend_InputModel(py::module m) { R"( Unassign specified name from tensor place(s). - Parameters - ---------- - name : str - Name of tensor. + :param name: Name of tensor. + :type name: str )"); im.def("free_name_for_operation", @@ -162,10 +129,8 @@ void regclass_frontend_InputModel(py::module m) { R"( Unassign specified name from operation place(s). - Parameters - ---------- - name : str - Name of operation. + :param name: Name of operation. + :type name: str )"); im.def("set_name_for_dimension", @@ -176,16 +141,12 @@ void regclass_frontend_InputModel(py::module m) { R"( Set name for a particular dimension of a place (e.g. batch dimension). - Parameters - ---------- - place : Place - Model's place. - - dim_index : int - Dimension index. - - dim_name : str - Name to assign on this dimension. + :param place: Model's place. + :type place: openvino.frontend.Place + :param dim_index: Dimension index. + :type dim_index: int + :param dim_name: Name to assign on this dimension. + :type dum_name: str )"); im.def("cut_and_add_new_input", @@ -196,13 +157,10 @@ void regclass_frontend_InputModel(py::module m) { Cut immediately before this place and assign this place as new input; prune all nodes that don't contribute to any output. - Parameters - ---------- - place : Place - New place to be assigned as input. - - new_name_optional : str - Optional new name assigned to this input place. + :param place: New place to be assigned as input. + :type place: openvino.frontend.Place + :param new_name: Optional new name assigned to this input place. 
+ :type new_name: str )"); im.def("cut_and_add_new_output", @@ -213,13 +171,10 @@ void regclass_frontend_InputModel(py::module m) { Cut immediately before this place and assign this place as new output; prune all nodes that don't contribute to any output. - Parameters - ---------- - place : Place - New place to be assigned as output. - - new_name_optional : str - Optional new name assigned to this output place. + :param place: New place to be assigned as output. + :type place: openvino.frontend.Place + :param new_name: Optional new name assigned to this output place. + :type new_name: str )"); im.def("add_output", @@ -228,10 +183,8 @@ void regclass_frontend_InputModel(py::module m) { R"( Assign this place as new output or add necessary nodes to represent a new output. - Parameters - ---------- - place : Place - Anchor point to add an output. + :param place: Anchor point to add an output. + :type place: openvino.frontend.Place )"); im.def("remove_output", @@ -241,10 +194,8 @@ void regclass_frontend_InputModel(py::module m) { Removes any sinks directly attached to this place with all inbound data flow if it is not required by any other output. - Parameters - ---------- - place : Place - Model place + :param place: Model place. + :type place: openvino.frontend.Place )"); im.def("set_partial_shape", @@ -257,13 +208,10 @@ void regclass_frontend_InputModel(py::module m) { shape of results ngraph nodes and will define shape inference when the model is converted to ngraph. - Parameters - ---------- - place : Place - Model place. - - shape : PartialShape - Partial shape for this place. + :param place: Model place. + :type place: openvino.frontend.Place + :param shape: Partial shape for this place. + :type shape: openvino.runtime.PartialShape )"); im.def("get_partial_shape", @@ -272,15 +220,10 @@ void regclass_frontend_InputModel(py::module m) { R"( Returns current partial shape used for this place. 
- Parameters - ---------- - place : Place - Model place - - Returns - ---------- - get_partial_shape : PartialShape - Partial shape for this place. + :param place: Model place. + :type place: openvino.frontend.Place + :return: Partial shape for this place. + :rtype: openvino.runtime.PartialShape )"); im.def("get_inputs", @@ -288,10 +231,8 @@ void regclass_frontend_InputModel(py::module m) { R"( Returns all inputs for a model. - Returns - ---------- - get_inputs : List[Place] - A list of input places. + :return: A list of input places. + :rtype: List[openvino.frontend.Place] )"); im.def("get_outputs", @@ -299,10 +240,8 @@ void regclass_frontend_InputModel(py::module m) { R"( Returns all outputs for a model. An output is a terminal place in a graph where data escapes the flow. - Returns - ---------- - get_outputs : List[Place] - A list of output places + :return: A list of output places. + :rtype: List[openvino.frontend.Place] )"); im.def("extract_subgraph", @@ -312,13 +251,10 @@ void regclass_frontend_InputModel(py::module m) { R"( Leaves only subgraph that are defined by new inputs and new outputs. - Parameters - ---------- - inputs : List[Place] - Array of new input places. - - outputs : List[Place] - Array of new output places. + :param inputs: Array of new input places. + :type inputs: List[openvino.frontend.Place] + :param outputs: Array of new output places. + :type outputs: List[openvino.frontend.Place] )"); im.def("override_all_inputs", @@ -328,10 +264,8 @@ void regclass_frontend_InputModel(py::module m) { Modifies the graph to use new inputs instead of existing ones. New inputs should completely satisfy all existing outputs. - Parameters - ---------- - inputs : List[Place] - Array of new input places. + :param inputs: Array of new input places. 
+ :type inputs: List[openvino.frontend.Place] )"); im.def("override_all_outputs", @@ -341,10 +275,8 @@ void regclass_frontend_InputModel(py::module m) { Replaces all existing outputs with new ones removing all data flow that is not required for new outputs. - Parameters - ---------- - outputs : List[Place] - Vector with places that will become new outputs; may intersect existing outputs. + :param outputs: Vector with places that will become new outputs; may intersect existing outputs. + :type outputs: List[openvino.frontend.Place] )"); im.def("set_element_type", @@ -354,13 +286,10 @@ void regclass_frontend_InputModel(py::module m) { R"( Sets new element type for a place. - Parameters - ---------- - place : Place - Model place. - - type : ngraph.Type - New element type. + :param place: Model place. + :type place: openvino.frontend.Place + :param type: New element type. + :type type: openvino.runtime.Type )"); im.def( @@ -375,12 +304,9 @@ void regclass_frontend_InputModel(py::module m) { R"( Sets new element type for a place. - Parameters - ---------- - place : Place - Model place. - - value : ndarray - New value to assign. + :param place: Model place. + :type place: openvino.frontend.Place + :param value: New value to assign. + :type value: numpy.ndarray )"); } diff --git a/src/bindings/python/src/pyopenvino/frontend/manager.cpp b/src/bindings/python/src/pyopenvino/frontend/manager.cpp index fa109a173ae..9970a79da56 100644 --- a/src/bindings/python/src/pyopenvino/frontend/manager.cpp +++ b/src/bindings/python/src/pyopenvino/frontend/manager.cpp @@ -34,10 +34,8 @@ void regclass_frontend_FrontEndManager(py::module m) { R"( Gets list of registered frontends. - Returns - ---------- - get_available_front_ends : List[str] - List of available frontend names. + :return: List of available frontend names. 
+ :rtype: List[str] )"); fem.def("load_by_framework", @@ -46,15 +44,10 @@ void regclass_frontend_FrontEndManager(py::module m) { R"( Loads frontend by name of framework and capabilities. - Parameters - ---------- - framework : str - Framework name. Throws exception if name is not in list of available frontends. - - Returns - ---------- - load_by_framework : FrontEnd - Frontend interface for further loading of models. + :param framework: Framework name. Throws exception if name is not in list of available frontends. + :type framework: str + :return: Frontend interface for further loading of models. + :rtype: openvino.frontend.FrontEnd )"); fem.def( @@ -66,15 +59,10 @@ void regclass_frontend_FrontEndManager(py::module m) { R"( Selects and loads appropriate frontend depending on model file extension and other file info (header). - Parameters - ---------- - model_path : str - Path to model file/directory. - - Returns - ---------- - load_by_model : FrontEnd - Frontend interface for further loading of models. 'None' if no suitable frontend is found + :param model_path: A path to a model file/directory. + :type model_path: str + :return: Frontend interface for further loading of models. 'None' if no suitable frontend is found. + :rtype: openvino.frontend.FrontEnd )"); fem.def("__repr__", [](const ov::frontend::FrontEndManager& self) -> std::string { diff --git a/src/bindings/python/src/pyopenvino/frontend/place.cpp b/src/bindings/python/src/pyopenvino/frontend/place.cpp index c525bd5fbd1..9518c2a5036 100644 --- a/src/bindings/python/src/pyopenvino/frontend/place.cpp +++ b/src/bindings/python/src/pyopenvino/frontend/place.cpp @@ -21,10 +21,8 @@ void regclass_frontend_Place(py::module m) { R"( Returns true if this place is input for a model. 
- Returns - ---------- - is_input : bool - True if this place is input for a model + :return: True if this place is input for a model + :rtype: bool )"); place.def("is_output", @@ -32,10 +30,8 @@ void regclass_frontend_Place(py::module m) { R"( Returns true if this place is output for a model. - Returns - ---------- - is_output : bool - True if this place is output for a model. + :return: True if this place is output for a model. + :rtype: bool )"); place.def("get_names", @@ -43,11 +39,9 @@ void regclass_frontend_Place(py::module m) { R"( All associated names (synonyms) that identify this place in the graph in a framework specific way. - Returns - ---------- - get_names : List[str] - A vector of strings each representing a name that identifies this place in the graph. - Can be empty if there are no names associated with this place or name cannot be attached. + :return: A vector of strings each representing a name that identifies this place in the graph. + Can be empty if there are no names associated with this place or name cannot be attached. + :rtype: List[str] )"); place.def("is_equal", @@ -56,15 +50,10 @@ void regclass_frontend_Place(py::module m) { R"( Returns true if another place is the same as this place. - Parameters - ---------- - other : Place - Another place object. - - Returns - ---------- - is_equal : bool - True if another place is the same as this place. + :param other: Another place object. + :type other: openvino.frontend.Place + :return: True if another place is the same as this place. + :rtype: bool )"); place.def("is_equal_data", @@ -75,15 +64,10 @@ void regclass_frontend_Place(py::module m) { Note: The same data means all places on path: output port -> output edge -> tensor -> input edge -> input port. - Parameters - ---------- - other : Place - Another place object. - - Returns - ---------- - is_equal_data : bool - True if another place points to the same data. + :param other: Another place object. 
+ :type other: openvino.frontend.Place + :return: True if another place points to the same data. + :rtype: bool )"); place.def( @@ -110,18 +94,13 @@ void regclass_frontend_Place(py::module m) { Returns references to all operation nodes that consume data from this place for specified output port. Note: It can be called for any kind of graph place searching for the first consuming operations. - Parameters - ---------- - output_name : str - Name of output port group. May not be set if node has one output port group. - output_port_index : int - If place is an operational node it specifies which output port should be considered + :param output_name: Name of output port group. May not be set if node has one output port group. + :type output_name: str + :param output_port_index: If place is an operational node it specifies which output port should be considered May not be set if node has only one output port. - - Returns - ---------- - get_consuming_operations : List[Place] - A list with all operation node references that consumes data from this place + :type output_port_index: int + :return: A list with all operation node references that consumes data from this place + :rtype: List[openvino.frontend.Place] )"); place.def( @@ -147,18 +126,13 @@ void regclass_frontend_Place(py::module m) { Returns a tensor place that gets data from this place; applicable for operations, output ports and output edges. - Parameters - ---------- - output_name : str - Name of output port group. May not be set if node has one output port group. - output_port_index : int - Output port index if the current place is an operation node and has multiple output ports. + :param output_name: Name of output port group. May not be set if node has one output port group. + :type output_name: str + :param output_port_index: Output port index if the current place is an operation node and has multiple output ports. May not be set if place has only one output port. 
- - Returns - ---------- - get_consuming_operations : Place - A tensor place which hold the resulting value for this place. + :type output_port_index: int + :return: A tensor place which hold the resulting value for this place. + :rtype: openvino.frontend.Place )"); place.def( @@ -184,18 +158,13 @@ void regclass_frontend_Place(py::module m) { R"( Get an operation node place that immediately produces data for this place. - Parameters - ---------- - input_name : str - Name of port group. May not be set if node has one input port group. - input_port_index : int - If a given place is itself an operation node, this specifies a port index. + :param input_name: Name of port group. May not be set if node has one input port group. + :type input_name: str + :param input_port_index: If a given place is itself an operation node, this specifies a port index. May not be set if place has only one input port. - - Returns - ---------- - get_producing_operation : Place - An operation place that produces data for this place. + :type input_port_index: int + :return: An operation place that produces data for this place. + :rtype: openvino.frontend.Place )"); place.def("get_producing_port", @@ -203,10 +172,8 @@ void regclass_frontend_Place(py::module m) { R"( Returns a port that produces data for this place. - Returns - ---------- - get_producing_port : Place - A port place that produces data for this place. + :return: A port place that produces data for this place. + :rtype: openvino.frontend.Place )"); place.def( @@ -231,18 +198,12 @@ void regclass_frontend_Place(py::module m) { R"( For operation node returns reference to an input port with specified name and index. - Parameters - ---------- - input_name : str - Name of port group. May not be set if node has one input port group. - - input_port_index : int - Input port index in a group. May not be set if node has one input port in a group. - - Returns - ---------- - get_input_port : Place - Appropriate input port place. 
+ :param input_name: Name of port group. May not be set if node has one input port group. + :type input_name: str + :param input_port_index: Input port index in a group. May not be set if node has one input port in a group. + :type input_port_index: int + :return: Appropriate input port place. + :rtype: openvino.frontend.Place )"); place.def( @@ -267,18 +228,12 @@ void regclass_frontend_Place(py::module m) { R"( For operation node returns reference to an output port with specified name and index. - Parameters - ---------- - output_name : str - Name of output port group. May not be set if node has one output port group. - - output_port_index : int - Output port index. May not be set if node has one output port in a group. - - Returns - ---------- - get_output_port : Place - Appropriate output port place. + :param output_name: Name of output port group. May not be set if node has one output port group. + :type output_name: str + :param output_port_index: Output port index. May not be set if node has one output port in a group. + :type output_port_index: int + :return: Appropriate output port place. + :rtype: openvino.frontend.Place )"); place.def("get_consuming_ports", @@ -286,10 +241,8 @@ void regclass_frontend_Place(py::module m) { R"( Returns all input ports that consume data flows through this place. - Returns - ---------- - get_consuming_ports : List[Place] - Input ports that consume data flows through this place. + :return: Input ports that consume data flows through this place. + :rtype: List[openvino.frontend.Place] )"); place.def( @@ -315,16 +268,11 @@ void regclass_frontend_Place(py::module m) { Returns a tensor place that supplies data for this place; applicable for operations, input ports and input edges. - Parameters - ---------- - input_name : str - Name of port group. May not be set if node has one input port group. - input_port_index : int - Input port index for operational node. May not be specified if place has only one input port. 
- - Returns - ---------- - get_source_tensor : Place - A tensor place which supplies data for this place. + :param input_name : Name of port group. May not be set if node has one input port group. + :type input_name: str + :param input_port_index: Input port index for operational node. May not be specified if place has only one input port. + :type input_port_index: int + :return: A tensor place which supplies data for this place. + :rtype: openvino.frontend.Place )"); } diff --git a/src/bindings/python/src/pyopenvino/graph/descriptors/tensor.cpp b/src/bindings/python/src/pyopenvino/graph/descriptors/tensor.cpp index 7e73074576b..3b6fded4057 100644 --- a/src/bindings/python/src/pyopenvino/graph/descriptors/tensor.cpp +++ b/src/bindings/python/src/pyopenvino/graph/descriptors/tensor.cpp @@ -27,10 +27,8 @@ void regclass_graph_descriptor_Tensor(py::module m) { R"( Returns the shape description. - Returns - ---------- - get_shape : Shape - The shape description. + :return: The shape description. + :rtype: openvino.runtime.Shape )"); tensor.def("get_rt_info", @@ -39,89 +37,73 @@ void regclass_graph_descriptor_Tensor(py::module m) { R"( Returns PyRTMap which is a dictionary of user defined runtime info. - Returns - ---------- - get_rt_info : PyRTMap - A dictionary of user defined data. + :return: A dictionary of user defined data. + :rtype: openvino.runtime.RTMap )"); tensor.def("size", &ov::descriptor::Tensor::size, R"( - Returns the size description + Returns the size description. - Returns - ---------- - size : size_t - The size description. + :return: The size description. + :rtype: size_t )"); tensor.def("get_partial_shape", &ov::descriptor::Tensor::get_partial_shape, R"( - Returns the partial shape description + Returns the partial shape description. - Returns - ---------- - get_partial_shape : PartialShape - PartialShape description. + :return: PartialShape description. 
+ :rtype: openvino.runtime.PartialShape )"); tensor.def("get_element_type", &ov::descriptor::Tensor::get_element_type, R"( - Returns the element type description + Returns the element type description. - Returns - ---------- - get_element_type : Type - Type description + :return: Type description. + :rtype: openvino.runtime.Type )"); tensor.def("get_names", &ov::descriptor::Tensor::get_names, R"( - Returns names + Returns names. - Returns - ---------- - get_names : set - Set of names + :return: Get names. + :rtype: set )"); tensor.def("set_names", &ov::descriptor::Tensor::set_names, py::arg("names"), R"( - Set names for tensor + Set names for tensor. - Parameters - ---------- - names : set - Set of names + :param names: Set of names. + :type names: set )"); - tensor.def("set_names", + tensor.def("add_names", &ov::descriptor::Tensor::add_names, py::arg("names"), R"( Adds names for tensor. - Parameters - ---------- - names : set - Set of names + :param names: Add names. + :type names: set )"); tensor.def("get_any_name", &ov::descriptor::Tensor::get_any_name, R"( - Returns any of set name + Returns any of set name. - Returns - ---------- - get_any_name : string - Any name + :return: Any name. 
+ :rtype: string )"); tensor.def_property_readonly("shape", &ov::descriptor::Tensor::get_shape); diff --git a/src/bindings/python/src/pyopenvino/graph/dimension.cpp b/src/bindings/python/src/pyopenvino/graph/dimension.cpp index bec1643aee6..4475abdb4a5 100644 --- a/src/bindings/python/src/pyopenvino/graph/dimension.cpp +++ b/src/bindings/python/src/pyopenvino/graph/dimension.cpp @@ -11,6 +11,7 @@ #include #include +#include "pyopenvino/core/common.hpp" #include "pyopenvino/graph/dimension.hpp" namespace py = pybind11; @@ -41,6 +42,10 @@ void regclass_graph_Dimension(py::module m) { :type max_dimension: int )"); + dim.def(py::init([](const std::string& value) { + return Common::dimension_from_str(value); + })); + dim.def_static("dynamic", &ov::Dimension::dynamic); dim.def_property_readonly("is_dynamic", diff --git a/src/bindings/python/src/pyopenvino/graph/model.cpp b/src/bindings/python/src/pyopenvino/graph/model.cpp index 3d4e09b39bb..b2e3af2811a 100644 --- a/src/bindings/python/src/pyopenvino/graph/model.cpp +++ b/src/bindings/python/src/pyopenvino/graph/model.cpp @@ -264,49 +264,114 @@ void regclass_graph_Model(py::module m) { [](ov::Model& self, const ov::PartialShape& partial_shape) { self.reshape(partial_shape); }, - py::arg("partial_shapes"), + py::arg("partial_shape"), R"( - :param partial_shapes: Index of Output. - :type partial_shapes: PartialShape + :param partial_shape: New shape. + :type partial_shape: PartialShape :return : void )"); function.def( "reshape", - [](ov::Model& self, const std::map& partial_shapes) { - self.reshape(partial_shapes); + [](ov::Model& self, const py::list& partial_shape) { + self.reshape(Common::partial_shape_from_list(partial_shape)); }, - py::arg("partial_shapes"), + py::arg("partial_shape"), R"( - - :param partial_shapes: Index of Output. - :type partial_shapes: Dict[int, PartialShape] - :return: void + :param partial_shape: New shape. 
+ :type partial_shape: list + :return : void )"); function.def( "reshape", - [](ov::Model& self, const std::map& partial_shapes) { - self.reshape(partial_shapes); + [](ov::Model& self, const py::tuple& partial_shape) { + self.reshape(Common::partial_shape_from_list(partial_shape.cast())); }, - py::arg("partial_shapes"), + py::arg("partial_shape"), R"( - :param partial_shapes: Index of Output. - :type partial_shapes: Dict[string, PartialShape] - :return: void + :param partial_shape: New shape. + :type partial_shape: tuple + :return : void )"); function.def( "reshape", - [](ov::Model& self, const std::map, ov::PartialShape>& partial_shapes) { - self.reshape(partial_shapes); + [](ov::Model& self, const std::string& partial_shape) { + self.reshape(Common::partial_shape_from_str(partial_shape)); + }, + py::arg("partial_shape"), + R"( + :param partial_shape: New shape. + :type partial_shape: str + :return : void + )"); + + function.def( + "reshape", + [](ov::Model& self, const py::dict& partial_shapes) { + std::map, ov::PartialShape> new_shapes; + for (const auto& item : partial_shapes) { + std::pair, ov::PartialShape> new_shape; + // check keys + if (py::isinstance(item.first)) { + new_shape.first = self.input(item.first.cast()); + } else if (py::isinstance(item.first)) { + new_shape.first = self.input(item.first.cast()); + } else if (py::isinstance>(item.first)) { + new_shape.first = item.first.cast>(); + } else { + throw py::type_error("Incorrect key type " + std::string(item.first.get_type().str()) + + " to reshape a model, expected keys as openvino.runtime.Output, int or str."); + } + // check values + if (py::isinstance(item.second)) { + new_shape.second = item.second.cast(); + } else if (py::isinstance(item.second) || py::isinstance(item.second)) { + new_shape.second = Common::partial_shape_from_list(item.second.cast()); + } else if (py::isinstance(item.second)) { + new_shape.second = Common::partial_shape_from_str(item.second.cast()); + } else { + throw 
py::type_error( + "Incorrect value type " + std::string(item.second.get_type().str()) + + " to reshape a model, expected values as openvino.runtime.PartialShape, str, list or tuple."); + } + new_shapes.insert(new_shape); + } + self.reshape(new_shapes); }, py::arg("partial_shapes"), - R"( - :param partial_shapes: Index of Output. - :type partial_shapes: Dict[Output, PartialShape] - :return: void - )"); + R"( Reshape model inputs. + + The allowed types of keys in the `partial_shapes` dictionary are: + + (1) `int`, input index + (2) `str`, input tensor name + (3) `openvino.runtime.Output` + + The allowed types of values in the `partial_shapes` are: + + (1) `openvino.runtime.PartialShape` + (2) `list` consisting of dimensions + (3) `tuple` consisting of dimensions + (4) `str`, string representation of `openvino.runtime.PartialShape` + + When list or tuple are used to describe dimensions, each dimension can be written in form: + + (1) non-negative `int` which means static value for the dimension + (2) `[min, max]`, dynamic dimension where `min` specifies lower bound and `max` specifies upper bound; the range includes both `min` and `max`; using `-1` for `min` or `max` means no known bound + (3) `(min, max)`, the same as above + (4) `-1` is a dynamic dimension without known bounds + (4) `openvino.runtime.Dimension` + (5) `str` using next syntax: + '?' - to define fully dinamic dimension + '1' - to define dimension which length is 1 + '1..10' - to define bounded dimension + '..10' or '1..' to define dimension with only lower or only upper limit + + :param partial_shapes: New shapes. + :type partial_shapes: Dict[keys, values] + )"); function.def("get_output_size", &ov::Model::get_output_size, @@ -566,7 +631,7 @@ void regclass_graph_Model(py::module m) { Return -1 if parameter not matched. - :param parameter: Parameter which index is to be found. + :param parameter: Parameter, which index is to be found. 
:type parameter: op.Parameter :return: Index for parameter :rtype: int diff --git a/src/bindings/python/src/pyopenvino/graph/node_input.cpp b/src/bindings/python/src/pyopenvino/graph/node_input.cpp index 01bd812721a..6a91e977552 100644 --- a/src/bindings/python/src/pyopenvino/graph/node_input.cpp +++ b/src/bindings/python/src/pyopenvino/graph/node_input.cpp @@ -24,73 +24,76 @@ void regclass_graph_Input(py::module m) { R"( Get node referenced by this input handle. - Returns - ---------- - get_node : Node - Node object referenced by this input handle. + :return: Node object referenced by this input handle. + :rtype: openvino.runtime.Node )"); input.def("get_index", &ov::Input::get_index, R"( The index of the input referred to by this input handle. - Returns - ---------- - get_index : int - Index value as integer. + :return: Index value as integer. + :rtype: int )"); input.def("get_element_type", &ov::Input::get_element_type, R"( The element type of the input referred to by this input handle. - Returns - ---------- - get_element_type : Type - Type of the input. + :return: Type of the input. + :rtype: openvino.runtime.Type )"); input.def("get_shape", &ov::Input::get_shape, R"( The shape of the input referred to by this input handle. - Returns - ---------- - get_shape : Shape - Shape of the input. + :return: Shape of the input. + :rtype: openvino.runtime.Shape )"); input.def("get_partial_shape", &ov::Input::get_partial_shape, R"( The partial shape of the input referred to by this input handle. - Returns - ---------- - get_partial_shape : PartialShape - PartialShape of the input. + :return: PartialShape of the input. + :rtype: openvino.runtime.PartialShape )"); input.def("get_source_output", &ov::Input::get_source_output, R"( A handle to the output that is connected to this input. - Returns - ---------- - get_source_output : Output - Output that is connected to the input. + :return: Output that is connected to the input. 
+ :rtype: openvino.runtime.Output )"); + input.def("get_tensor", + &ov::Input::get_tensor, + py::return_value_policy::reference_internal, + R"( + A reference to the tensor descriptor for this input. + :return: Tensor of the input. + :rtype: openvino.pyopenvino.DescriptorTensor + )"); input.def("get_rt_info", (ov::RTMap & (ov::Input::*)()) & ov::Input::get_rt_info, py::return_value_policy::reference_internal, R"( Returns RTMap which is a dictionary of user defined runtime info. - Returns - ---------- - get_rt_info : RTMap - A dictionary of user defined data. + :return: A dictionary of user defined data. + :rtype: openvino.runtime.RTMap )"); + input.def("replace_source_output", + &ov::Input::replace_source_output, + py::arg("new_source_output"), + R"( + Replaces the source output of this input. + + :param new_source_output: A handle for the output that will replace this input's source. + :type new_source_output: openvino.runtime.Input + )"); input.def_property_readonly("rt_info", (ov::RTMap & (ov::Input::*)()) & ov::Input::get_rt_info); input.def_property_readonly("rt_info", (const ov::RTMap& (ov::Input::*)() const) & ov::Input::get_rt_info, diff --git a/src/bindings/python/src/pyopenvino/graph/node_output.cpp b/src/bindings/python/src/pyopenvino/graph/node_output.cpp index bb3ad14f1f1..6ad434f56e3 100644 --- a/src/bindings/python/src/pyopenvino/graph/node_output.cpp +++ b/src/bindings/python/src/pyopenvino/graph/node_output.cpp @@ -13,3 +13,29 @@ namespace py = pybind11; template void regclass_graph_Output(py::module m, std::string typestring); template void regclass_graph_Output(py::module m, std::string typestring); + +template +void def_type_dependent_functions(py::class_, std::shared_ptr>>& output) {} + +template <> +void def_type_dependent_functions( + py::class_, std::shared_ptr>>& output) { + output.def("remove_target_input", + &ov::Output::remove_target_input, + py::arg("target_input"), + R"( + Removes a target input from the output referenced by this output 
handle. + + :param target_input: The target input to remove. + :type target_input: openvino.runtime.Output + )"); + output.def("replace", + &ov::Output::replace, + py::arg("replacement"), + R"( + Replace all users of this value with replacement. + + :param replacement: The node that is a replacement. + :type replacement: openvino.runtime.Output + )"); +} diff --git a/src/bindings/python/src/pyopenvino/graph/node_output.hpp b/src/bindings/python/src/pyopenvino/graph/node_output.hpp index 30305555040..37eb80ea17b 100644 --- a/src/bindings/python/src/pyopenvino/graph/node_output.hpp +++ b/src/bindings/python/src/pyopenvino/graph/node_output.hpp @@ -11,12 +11,23 @@ #include "openvino/core/node_output.hpp" #include "pyopenvino/core/common.hpp" +#include + namespace py = pybind11; using PyRTMap = ov::Node::RTMap; PYBIND11_MAKE_OPAQUE(PyRTMap); +// this function is overloaded in the corresponding cpp file with T=ov::Node +// it exposes additional functions with T = ov::Node, which are undefined with T = const ov::Node +template +void def_type_dependent_functions(py::class_, std::shared_ptr>>& output); + +template<> +void def_type_dependent_functions(py::class_, + std::shared_ptr>>& output); + template void regclass_graph_Output(py::module m, std::string typestring) { @@ -101,7 +112,7 @@ void regclass_graph_Output(py::module m, std::string typestring) output.def("get_target_inputs", &ov::Output::get_target_inputs, R"( - A set containing handles for all inputs targeted by the output + A set containing handles for all inputs, targeted by the output, referenced by this output handle. :return: Set of Inputs. @@ -117,7 +128,7 @@ void regclass_graph_Output(py::module m, std::string typestring) A reference to the tensor descriptor for this output. :return: Tensor of the output. 
- :rtype: openvino.pyopenvino.DescriptorTensor + :rtype: openvino.pyopenvino.DescriptorTensor )"); output.def("get_rt_info", (ov::RTMap & (ov::Output::*)()) & ov::Output::get_rt_info, @@ -128,7 +139,6 @@ void regclass_graph_Output(py::module m, std::string typestring) :return: A dictionary of user defined data. :rtype: openvino.runtime.RTMap )"); - output.def("__repr__", [typestring](const ov::Output& self) { std::stringstream shape_type_ss; @@ -155,4 +165,7 @@ void regclass_graph_Output(py::module m, std::string typestring) (const ov::RTMap&(ov::Output::*)() const) & ov::Output::get_rt_info, py::return_value_policy::reference_internal); + + // define functions avaliable only for specific type + def_type_dependent_functions(output); } diff --git a/src/bindings/python/src/pyopenvino/graph/partial_shape.cpp b/src/bindings/python/src/pyopenvino/graph/partial_shape.cpp index 1f6a1c2499c..dfc5595f7fe 100644 --- a/src/bindings/python/src/pyopenvino/graph/partial_shape.cpp +++ b/src/bindings/python/src/pyopenvino/graph/partial_shape.cpp @@ -13,6 +13,7 @@ #include "openvino/core/dimension.hpp" // ov::Dimension #include "openvino/core/shape.hpp" // ov::Shape +#include "pyopenvino/core/common.hpp" #include "pyopenvino/graph/partial_shape.hpp" namespace py = pybind11; @@ -23,15 +24,17 @@ void regclass_graph_PartialShape(py::module m) { py::class_> shape(m, "PartialShape"); shape.doc() = "openvino.runtime.PartialShape wraps ov::PartialShape"; - shape.def(py::init([](const std::vector& dimensions) { - return ov::PartialShape(std::vector(dimensions.begin(), dimensions.end())); - })); - shape.def(py::init&>()); - shape.def(py::init&>()); - shape.def(py::init&>()); - shape.def(py::init&>()); shape.def(py::init()); shape.def(py::init()); + shape.def(py::init([](py::list& shape) { + return Common::partial_shape_from_list(shape); + })); + shape.def(py::init([](py::tuple& shape) { + return Common::partial_shape_from_list(shape.cast()); + })); + shape.def(py::init([](const std::string& 
shape) { + return Common::partial_shape_from_str(shape); + })); shape.def_static("dynamic", &ov::PartialShape::dynamic, py::arg("rank") = ov::Dimension()); diff --git a/src/bindings/python/src/pyopenvino/graph/passes/manager.cpp b/src/bindings/python/src/pyopenvino/graph/passes/manager.cpp index 4b9d86590fa..34211baeca7 100644 --- a/src/bindings/python/src/pyopenvino/graph/passes/manager.cpp +++ b/src/bindings/python/src/pyopenvino/graph/passes/manager.cpp @@ -76,10 +76,9 @@ void regclass_graph_passes_Manager(py::module m) { py::arg("pass_name"), R"( Set the type of register pass for pass manager. - Parameters - ---------- - pass_name : str - string to set the type of a pass + + :param pass_name : String to set the type of a pass. + :type pass_name: str // )"); manager.def("register_pass", @@ -90,18 +89,17 @@ void regclass_graph_passes_Manager(py::module m) { py::arg("version") = "UNSPECIFIED", R"( Set the type of register pass for pass manager. - Parameters - ---------- - pass_name : str - string to set the type of a pass - output_files : Tuple[str, str] - tuple which contains paths where .xml and .bin files will be saved - version : str - sets the version of the IR which will be generated. - Supported versions are: - - "UNSPECIFIED" (default) : Use the latest or function version - - "IR_V10" : v10 IR - - "IR_V11" : v11 IR + + :param pass_name: String to set the type of a pass. + :type pass_name: str + :param output_files: Tuple which contains paths where .xml and .bin files will be saved. + :type output_files: Tuple[str, str] + :param version: Sets the version of the IR which will be generated. + Supported versions are: + - "UNSPECIFIED" (default) : Use the latest or function version + - "IR_V10" : v10 IR + - "IR_V11" : v11 IR + :type version: str Examples ---------- 1. Default Version @@ -121,20 +119,19 @@ void regclass_graph_passes_Manager(py::module m) { py::arg("version") = "UNSPECIFIED", R"( Set the type of register pass for pass manager. 
- Parameters - ---------- - pass_name : str - string to set the type of a pass - xml_path : str - path where .xml file will be saved - bin_path : str - path where .bin file will be saved - version : str - sets the version of the IR which will be generated. + + :param pass_name: String to set the type of a pass. + :type pass_name: str + :param xml_path: Path where *.xml file will be saved. + :type xml_path: str + :param bin_path: Path where *.bin file will be saved. + :type bin_path: str + :param version: Sets the version of the IR which will be generated. Supported versions are: - "UNSPECIFIED" (default) : Use the latest or function version - "IR_V10" : v10 IR - "IR_V11" : v11 IR + :type version: str Examples ---------- 1. Default Version diff --git a/src/bindings/python/src/pyopenvino/graph/preprocess/pre_post_process.cpp b/src/bindings/python/src/pyopenvino/graph/preprocess/pre_post_process.cpp index 56ef90f5db8..11af911b0dc 100644 --- a/src/bindings/python/src/pyopenvino/graph/preprocess/pre_post_process.cpp +++ b/src/bindings/python/src/pyopenvino/graph/preprocess/pre_post_process.cpp @@ -33,15 +33,10 @@ static void regclass_graph_PreProcessSteps(py::module m) { Subtracts single float value from each element in input tensor. Input tensor must have ov.Type.f32 data type. - Parameters - ---------- - value : float - Value to subtract. - - Returns - ---------- - selfan : PreProcessSteps - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :param value: Value to subtract. + :type value: float + :return: Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :rtype: openvino.runtime.preprocess.PreProcessSteps )"); steps.def( @@ -54,15 +49,10 @@ static void regclass_graph_PreProcessSteps(py::module m) { Subtracts a given single float value from each element in a given channel from input tensor. Input tensor must have ov.Type.f32 data type. 
- Parameters - ---------- - values : List[float] - Values to subtract. - - Returns - ---------- - selfan : PreProcessSteps - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :param values: Values to subtract. + :type values: List[float] + :return: Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :rtype: openvino.runtime.preprocess.PreProcessSteps )"); steps.def( @@ -75,15 +65,10 @@ static void regclass_graph_PreProcessSteps(py::module m) { Divides each element in input tensor by specified constant float value. Input tensor must have ov.Type.f32 data type. - Parameters - ---------- - value : float - Value to divide. - - Returns - ---------- - scale : PreProcessSteps - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :param value: Value used in division. + :type value: float + :return: Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :rtype: openvino.runtime.preprocess.PreProcessSteps )"); steps.def( @@ -96,36 +81,26 @@ static void regclass_graph_PreProcessSteps(py::module m) { Divides each element in a given channel from input tensor by a given single float value. Input tensor must have ov.Type.f32 data type. - Parameters - ---------- - value : List[float] - Value to divide. - - Returns - ---------- - scale : PreProcessSteps - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :param values: Values which are used in division. + :type values: List[float] + :return: Reference to itself to allow chaining of calls in client's code in a builder-like manner. 
+ :rtype: openvino.runtime.preprocess.PreProcessSteps )"); steps.def( "convert_element_type", - [](ov::preprocess::PreProcessSteps& self, ov::element::Type type) { + [](ov::preprocess::PreProcessSteps& self, ov::element::Type type = {}) { return &self.convert_element_type(type); }, - py::arg("type"), + py::arg_v("type", ov::element::undefined, "openvino.runtime.Type.undefined"), R"( Converts input tensor element type to specified type. - Input tensor must have openvino.Type.f32 data type. + Input tensor must have openvino.Type data type. - Parameters - ---------- - type : openvino.runtime.Type - Destination type. - - Returns - ---------- - convert_element_type : PreProcessSteps - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :param type: Destination type. If not specified, type will be taken from model input's element type + :type type: openvino.runtime.Type + :return: Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :rtype: openvino.runtime.preprocess.PreProcessSteps )"); steps.def( @@ -137,14 +112,10 @@ static void regclass_graph_PreProcessSteps(py::module m) { R"( Adds custom preprocessing operation. - Parameters - ---------- - operation : function taking Output as input argument and returning Output after preprocessing. - - Returns - ---------- - custom : PreProcessSteps - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :param operation: Python's function which takes `openvino.runtime.Output` as input argument and returns`openvino.runtime.Output`. + :type operation: function + :return: Reference to itself, allows chaining of calls in client's code in a builder-like manner. 
+ :rtype: openvino.runtime.preprocess.PreProcessSteps )"); steps.def( @@ -173,6 +144,14 @@ static void regclass_graph_PreProcessSteps(py::module m) { }, py::arg("alg")); + steps.def( + "crop", + [](ov::preprocess::PreProcessSteps& self, const std::vector& begin, const std::vector& end) { + return &self.crop(begin, end); + }, + py::arg("begin"), + py::arg("end")); + steps.def( "convert_layout", [](ov::preprocess::PreProcessSteps& self, const ov::Layout& layout = {}) { @@ -200,23 +179,18 @@ static void regclass_graph_PostProcessSteps(py::module m) { steps.def( "convert_element_type", - [](ov::preprocess::PostProcessSteps& self, ov::element::Type type) { + [](ov::preprocess::PostProcessSteps& self, ov::element::Type type = {}) { return &self.convert_element_type(type); }, - py::arg("type"), + py::arg_v("type", ov::element::undefined, "openvino.runtime.Type.undefined"), R"( Converts tensor element type to specified type. - Tensor must have openvino.Type.f32 data type. + Tensor must have openvino.Type data type. - Parameters - ---------- - type : Type - Destination type. - - Returns - ---------- - convert_element_type : PostProcessSteps - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :param type: Destination type. If not specified, type will be taken from model output's element type. + :type type: openvino.runtime.Type + :return: Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :rtype: openvino.runtime.preprocess.PostProcessSteps )"); steps.def( @@ -242,14 +216,10 @@ static void regclass_graph_PostProcessSteps(py::module m) { R"( Adds custom postprocessing operation. - Parameters - ---------- - operation : function taking Output as input argument and returning Output after postprocessing. - - Returns - ---------- - custom : PostProcessSteps - Reference to itself to allow chaining of calls in client's code in a builder-like manner. 
+ :param operation: Python's function which takes `openvino.runtime.Output` as input argument and returns`openvino.runtime.Output`. + :type operation: function + :return: Reference to itself, allows chaining of calls in client's code in a builder-like manner. + :rtype: openvino.runtime.preprocess.PreProcessSteps )"); } @@ -269,15 +239,10 @@ static void regclass_graph_InputTensorInfo(py::module m) { Set initial client's tensor element type. If type is not the same as model's element type, conversion of element type will be done automatically. - Parameters - ---------- - type : Type - Client's input tensor element type. - - Returns - ---------- - tensor : InputTensorInfo - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :param type: Client's input tensor element type. + :type type: openvino.runtime.Type + :return: Reference to itself, allows chaining of calls in client's code in a builder-like manner. + :rtype: openvino.runtime.preprocess.InputTensorInfo )"); info.def( @@ -322,7 +287,7 @@ static void regclass_graph_InputTensorInfo(py::module m) { return &self.set_color_format(format, sub_names); }, py::arg("format"), - py::arg("sub_names")); + py::arg("sub_names") = std::vector{}); info.def( "set_memory_type", @@ -330,6 +295,21 @@ static void regclass_graph_InputTensorInfo(py::module m) { return &self.set_memory_type(memory_type); }, py::arg("memory_type")); + + info.def( + "set_from", + [](ov::preprocess::InputTensorInfo& self, const ov::Tensor& tensor) { + return &self.set_from(tensor); + }, + py::arg("runtime_tensor")); + + info.def( + "set_from", + [](ov::preprocess::InputTensorInfo& self, py::array& numpy_array) { + // Convert to contiguous array if not already C-style. 
+ return &self.set_from(Common::tensor_from_numpy(numpy_array, false)); + }, + py::arg("runtime_tensor")); } static void regclass_graph_OutputTensorInfo(py::module m) { @@ -347,14 +327,11 @@ static void regclass_graph_OutputTensorInfo(py::module m) { R"( Set client's output tensor element type. If type is not the same as model's element type, conversion of element type will be done automatically. - Parameters - ---------- - type : Type - Client's output tensor element type. - Returns - ---------- - tensor : OutputTensorInfo - Reference to itself to allow chaining of calls in client's code in a builder-like manner. + + :param type: Client's output tensor element type. + :type type: openvino.runtime.Type + :return: Reference to itself to allow chaining of calls in client's code in a builder-like manner. + :rtype: openvino.runtime.preprocess.OutputTensorInfo )"); info.def( diff --git a/src/bindings/python/src/pyopenvino/graph/shape.cpp b/src/bindings/python/src/pyopenvino/graph/shape.cpp index 975aad6f84c..1f306ac2649 100644 --- a/src/bindings/python/src/pyopenvino/graph/shape.cpp +++ b/src/bindings/python/src/pyopenvino/graph/shape.cpp @@ -19,6 +19,7 @@ namespace py = pybind11; void regclass_graph_Shape(py::module m) { py::class_> shape(m, "Shape"); shape.doc() = "openvino.runtime.Shape wraps ov::Shape"; + shape.def(py::init<>()); shape.def(py::init&>(), py::arg("axis_lengths")); shape.def(py::init&>(), py::arg("axis_lengths")); shape.def(py::init(), py::arg("axis_lengths")); diff --git a/src/bindings/python/src/pyopenvino/graph/types/element_type.cpp b/src/bindings/python/src/pyopenvino/graph/types/element_type.cpp index 204d125a330..b41d5c000d5 100644 --- a/src/bindings/python/src/pyopenvino/graph/types/element_type.cpp +++ b/src/bindings/python/src/pyopenvino/graph/types/element_type.cpp @@ -29,10 +29,13 @@ void regclass_graph_Type(py::module m) { type.attr("u32") = ov::element::u32; type.attr("u64") = ov::element::u64; type.attr("bf16") = ov::element::bf16; + 
type.attr("undefined") = ov::element::undefined; type.def("__repr__", [](const ov::element::Type& self) { std::string bitwidth = std::to_string(self.bitwidth()); - if (self.is_signed()) { + if (self == ov::element::undefined) { + return ""; + } else if (self.is_signed()) { return ""; } return ""; diff --git a/src/bindings/python/src/pyopenvino/graph/util.cpp b/src/bindings/python/src/pyopenvino/graph/util.cpp index 5bfdf7f6a16..b35bc1c5cbd 100644 --- a/src/bindings/python/src/pyopenvino/graph/util.cpp +++ b/src/bindings/python/src/pyopenvino/graph/util.cpp @@ -16,7 +16,7 @@ void* numpy_to_c(py::array a) { } void regmodule_graph_util(py::module m) { - py::module mod = m.def_submodule("util", "openvino.runtime.util"); + py::module mod = m.def_submodule("util", "openvino.runtime.utils"); mod.def("numpy_to_c", &numpy_to_c); mod.def("get_constant_from_source", &ov::get_constant_from_source, @@ -27,7 +27,7 @@ void regmodule_graph_util(py::module m) { :param index: Output node. :type index: openvino.runtime.Output :return: If it succeeded to calculate both bounds and - they are the same returns Constant operation + they are the same, returns Constant operation from the resulting bound, otherwise Null. 
:rtype: openvino.runtime.op.Constant or openvino.runtime.Node )"); diff --git a/src/bindings/python/src/pyopenvino/graph/util.py b/src/bindings/python/src/pyopenvino/graph/util.py deleted file mode 100644 index ecc73a2a6d1..00000000000 --- a/src/bindings/python/src/pyopenvino/graph/util.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (C) 2018-2022 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -# flake8: noqa - -from openvino.pyopenvino import util - -numpy_to_c = util.numpy_to_c diff --git a/src/bindings/python/src/pyopenvino/pyopenvino.cpp b/src/bindings/python/src/pyopenvino/pyopenvino.cpp index 012a5bf6fc5..53ac0003301 100644 --- a/src/bindings/python/src/pyopenvino/pyopenvino.cpp +++ b/src/bindings/python/src/pyopenvino/pyopenvino.cpp @@ -69,6 +69,21 @@ std::string get_version() { PYBIND11_MODULE(pyopenvino, m) { m.doc() = "Package openvino.pyopenvino which wraps openvino C++ APIs"; + std::string pyopenvino_version = CI_BUILD_NUMBER; + std::string runtime_version = get_version(); + bool is_custom_pyopenvino_version = pyopenvino_version.empty() || pyopenvino_version.find("custom_") == 0; + bool is_custom_runtime_version = runtime_version.empty() || runtime_version.find("custom_") == 0; + auto versions_compatible = + is_custom_pyopenvino_version || is_custom_runtime_version || pyopenvino_version == runtime_version; + OPENVINO_ASSERT(versions_compatible, + "OpenVINO Python version (", + pyopenvino_version, + ") mismatches with OpenVINO Runtime library version (", + runtime_version, + "). It can happen if you have 2 or more different versions of OpenVINO installed in system. " + "Please ensure that environment variables (e.g. 
PATH, PYTHONPATH) are set correctly so that " + "OpenVINO Runtime and Python libraries point to same release."); + m.def("get_version", &get_version); m.def("get_batch", &ov::get_batch); m.def("set_batch", &ov::set_batch); diff --git a/src/bindings/python/tests/test_frontend/test_frontend_onnx.py b/src/bindings/python/tests/test_frontend/test_frontend_onnx.py index 74914ee2aef..5026b9e2423 100644 --- a/src/bindings/python/tests/test_frontend/test_frontend_onnx.py +++ b/src/bindings/python/tests/test_frontend/test_frontend_onnx.py @@ -6,6 +6,8 @@ import onnx import numpy as np from onnx.helper import make_graph, make_model, make_tensor_value_info import pytest +from pathlib import Path +from itertools import chain from openvino.frontend import FrontEndManager from tests.runtime import get_runtime @@ -26,7 +28,19 @@ def create_onnx_model(): ] output_tensors = [make_tensor_value_info("out", onnx.TensorProto.FLOAT, (2, 2))] graph = make_graph([add, const_node, mul], "graph", input_tensors, output_tensors) - return make_model(graph, producer_name="ngraph ONNX Importer") + return make_model(graph, producer_name="ONNX Frontend") + + +def create_onnx_model_2(): + relu = onnx.helper.make_node("Relu", inputs=["in"], outputs=["out"]) + input_tensors = [ + make_tensor_value_info("in", onnx.TensorProto.FLOAT, (1, 2)), + ] + output_tensors = [ + make_tensor_value_info("out", onnx.TensorProto.FLOAT, (1, 2)), + ] + graph = make_graph([relu], "test_graph", input_tensors, output_tensors) + return make_model(graph, producer_name="ONNX Frontend") def create_onnx_model_with_subgraphs(): @@ -52,7 +66,7 @@ def create_onnx_model_with_subgraphs(): res = onnx.helper.make_tensor_value_info("res", onnx.TensorProto.FLOAT, [3]) graph = make_graph([if_node], "graph", [cond, A, B], [res]) - return make_model(graph, producer_name="ngraph ONNX Importer") + return make_model(graph, producer_name="ONNX Frontend") def create_onnx_model_with_custom_attributes(): @@ -88,7 +102,7 @@ def 
create_onnx_model_with_custom_attributes(): ] output_tensors = [make_tensor_value_info("out", onnx.TensorProto.FLOAT, (2, 2))] graph = make_graph([add, const_node, mul], "graph", input_tensors, output_tensors) - return make_model(graph, producer_name="ngraph ONNX Importer") + return make_model(graph, producer_name="ONNX Frontend") def create_onnx_model_for_op_extension(): @@ -124,7 +138,7 @@ def create_onnx_model_for_op_extension(): output_tensors = [make_tensor_value_info("out", onnx.TensorProto.FLOAT, (3, 3, 32, 32))] graph = make_graph([const_node, elu, avg_pool, floor, concat, mul, cast], "graph", input_tensors, output_tensors) - return make_model(graph, producer_name="ngraph ONNX Importer") + return make_model(graph, producer_name="ONNX Frontend") def run_function(function, *inputs, expected): @@ -140,6 +154,7 @@ def run_function(function, *inputs, expected): # This is because destroy of FrontEndManager will unload all plugins, no objects shall exist after this fem = FrontEndManager() onnx_model_filename = "model.onnx" +onnx_model_2_filename = "model2.onnx" onnx_model_with_custom_attributes_filename = "model_custom_attributes.onnx" onnx_model_with_subgraphs_filename = "model_subgraphs.onnx" onnx_model_for_op_extension_test = "model_op_extension.onnx" @@ -148,6 +163,7 @@ ONNX_FRONTEND_NAME = "onnx" def setup_module(): onnx.save_model(create_onnx_model(), onnx_model_filename) + onnx.save_model(create_onnx_model_2(), onnx_model_2_filename) onnx.save_model(create_onnx_model_with_custom_attributes(), onnx_model_with_custom_attributes_filename) onnx.save_model(create_onnx_model_with_subgraphs(), onnx_model_with_subgraphs_filename) @@ -156,6 +172,7 @@ def setup_module(): def teardown_module(): os.remove(onnx_model_filename) + os.remove(onnx_model_2_filename) os.remove(onnx_model_with_custom_attributes_filename) os.remove(onnx_model_with_subgraphs_filename) os.remove(onnx_model_for_op_extension_test) @@ -593,3 +610,44 @@ def 
test_op_extension_via_frontend_extension_map_attributes(): model = ie.read_model(onnx_model_for_op_extension_test) assert model + + +def get_builtin_extensions_path(): + win_folder_path = Path(__file__).parent.parent.parent.parent + linux_folder_path = win_folder_path.joinpath("lib") + for lib_path in chain(win_folder_path.glob("*.dll"), linux_folder_path.glob("*.so")): + if "libtest_builtin_extensions_1" in lib_path.name: + return str(lib_path) + return "" + + +@pytest.mark.skipif(len(get_builtin_extensions_path()) == 0, + reason="The extension library path was not found") +def test_so_extension_via_frontend_convert_input_model(): + skip_if_onnx_frontend_is_disabled() + + def load_model(): + fe = fem.load_by_framework(framework=ONNX_FRONTEND_NAME) + fe.add_extension(get_builtin_extensions_path()) + in_model = fe.load(onnx_model_2_filename) + return fe.convert(in_model) + + model = load_model() # model has longer lifetime than frontend + + assert any(op.get_type_name() == "Swish" for op in model.get_ops()) + assert all(op.get_type_name() != "Relu" for op in model.get_ops()) + + +@pytest.mark.skipif(len(get_builtin_extensions_path()) == 0, + reason="The extension library path was not found") +def test_so_extension_via_frontend_decode_input_model(): + skip_if_onnx_frontend_is_disabled() + + def load_decoded_model(): + fe = fem.load_by_framework(framework=ONNX_FRONTEND_NAME) + fe.add_extension(get_builtin_extensions_path()) + in_model = fe.load(onnx_model_2_filename) + return fe.decode(in_model) + + decoded_model = load_decoded_model() # decoded model has longer lifetime than frontend + assert decoded_model diff --git a/src/bindings/python/tests/test_frontend/test_frontendmanager.py b/src/bindings/python/tests/test_frontend/test_frontendmanager.py index 518e29def54..1c4bf3dca3f 100644 --- a/src/bindings/python/tests/test_frontend/test_frontendmanager.py +++ b/src/bindings/python/tests/test_frontend/test_frontendmanager.py @@ -92,7 +92,7 @@ def 
test_convert_partially(): func = fe.convert_partially(model=model) stat = get_fe_stat() assert stat.convert_partially == 1 - fe.convert(function=func) + fe.convert(model=func) stat = get_fe_stat() assert stat.convert == 1 @@ -106,7 +106,7 @@ def test_decode_and_normalize(): func = fe.decode(model=model) stat = get_fe_stat() assert stat.decode == 1 - fe.normalize(function=func) + fe.normalize(model=func) stat = get_fe_stat() assert stat.normalize == 1 assert stat.decode == 1 diff --git a/src/bindings/python/tests/test_inference_engine/test_compiled_model.py b/src/bindings/python/tests/test_inference_engine/test_compiled_model.py index 60b5f44e8fc..e0bb872bf4d 100644 --- a/src/bindings/python/tests/test_inference_engine/test_compiled_model.py +++ b/src/bindings/python/tests/test_inference_engine/test_compiled_model.py @@ -29,8 +29,8 @@ def test_get_property(device): pytest.skip("Can't run on ARM plugin due-to CPU dependent test") func = core.read_model(model=test_net_xml, weights=test_net_bin) exec_net = core.compile_model(func, device) - config = exec_net.get_property("PERF_COUNT") - assert config == "NO" + profiling_enabled = exec_net.get_property("PERF_COUNT") + assert not profiling_enabled def test_get_runtime_model(device): diff --git a/src/bindings/python/tests/test_inference_engine/test_core.py b/src/bindings/python/tests/test_inference_engine/test_core.py index 0c23897a06b..6340e7c30c7 100644 --- a/src/bindings/python/tests/test_inference_engine/test_core.py +++ b/src/bindings/python/tests/test_inference_engine/test_core.py @@ -315,10 +315,8 @@ def test_add_extension_template_extension(device): new_shapes = {"in_data": after_reshape} assert model.input().partial_shape == before_reshape model.reshape(new_shapes) - assert model.input().partial_shape == after_reshape - - # CVS-74584 - del model + compiled = core.compile_model(model, device) + assert compiled.input().partial_shape == after_reshape def test_add_extension(): diff --git 
a/src/bindings/python/tests/test_inference_engine/test_function.py b/src/bindings/python/tests/test_inference_engine/test_function.py index adaa2da2302..9863474ad6b 100644 --- a/src/bindings/python/tests/test_inference_engine/test_function.py +++ b/src/bindings/python/tests/test_inference_engine/test_function.py @@ -361,3 +361,76 @@ def test_reshape(device): core = Core() compiled = core.compile_model(model, device) assert compiled.input().partial_shape == ref_shape + + +def test_reshape_with_python_types(device): + model = create_test_model() + + def check_shape(new_shape): + for input in model.inputs: + assert input.partial_shape == new_shape + + shape1 = [1, 4] + new_shapes = {input: shape1 for input in model.inputs} + model.reshape(new_shapes) + check_shape(PartialShape(shape1)) + + shape2 = [1, 6] + new_shapes = {input.any_name: shape2 for input in model.inputs} + model.reshape(new_shapes) + check_shape(PartialShape(shape2)) + + shape3 = [1, 8] + new_shapes = {i: shape3 for i, input in enumerate(model.inputs)} + model.reshape(new_shapes) + check_shape(PartialShape(shape3)) + + shape4 = [1, -1] + new_shapes = {input: shape4 for input in model.inputs} + model.reshape(new_shapes) + check_shape(PartialShape([Dimension(1), Dimension(-1)])) + + shape5 = [1, (1, 10)] + new_shapes = {input: shape5 for input in model.inputs} + model.reshape(new_shapes) + check_shape(PartialShape([Dimension(1), Dimension(1, 10)])) + + shape6 = [Dimension(3), Dimension(3, 10)] + new_shapes = {input: shape6 for input in model.inputs} + model.reshape(new_shapes) + check_shape(PartialShape(shape6)) + + shape7 = "1..10, ?" 
+ new_shapes = {input: shape7 for input in model.inputs} + model.reshape(new_shapes) + check_shape(PartialShape(shape7)) + + # reshape mixed keys + shape8 = [(1, 20), -1] + new_shapes = {"data1": shape8, 1: shape8} + model.reshape(new_shapes) + check_shape(PartialShape([Dimension(1, 20), Dimension(-1)])) + + # reshape with one input + param = ops.parameter([1, 3, 28, 28]) + model = Model(ops.relu(param), [param]) + + shape9 = [-1, 3, (28, 56), (28, 56)] + model.reshape(shape9) + check_shape(PartialShape([Dimension(-1), Dimension(3), Dimension(28, 56), Dimension(28, 56)])) + + shape10 = "?,3,..224,..224" + model.reshape(shape10) + check_shape(PartialShape([Dimension(-1), Dimension(3), Dimension(-1, 224), Dimension(-1, 224)])) + + # check exceptions + shape10 = [1, 1, 1, 1] + with pytest.raises(TypeError) as e: + model.reshape({model.input().node: shape10}) + assert "Incorrect key type to reshape a model, " \ + "expected keys as openvino.runtime.Output, int or str." in str(e.value) + + with pytest.raises(TypeError) as e: + model.reshape({0: range(1, 9)}) + assert "Incorrect value type to reshape a model, " \ + "expected values as openvino.runtime.PartialShape, str, list or tuple." 
in str(e.value) diff --git a/src/bindings/python/tests/test_inference_engine/test_infer_request.py b/src/bindings/python/tests/test_inference_engine/test_infer_request.py index abe51c294b6..4119637c04e 100644 --- a/src/bindings/python/tests/test_inference_engine/test_infer_request.py +++ b/src/bindings/python/tests/test_inference_engine/test_infer_request.py @@ -10,7 +10,7 @@ import time import openvino.runtime.opset8 as ops from openvino.runtime import Core, AsyncInferQueue, Tensor, ProfilingInfo, Model -from openvino.runtime import Type, Shape, Layout +from openvino.runtime import Type, PartialShape, Shape, Layout from openvino.preprocess import PrePostProcessor from ..conftest import model_path, read_image @@ -656,3 +656,20 @@ def test_invalid_inputs_container(device): with pytest.raises(TypeError) as e: request.infer(inputs) assert "Inputs should be either list or dict! Current type:" in str(e.value) + + +def test_infer_dynamic_model(device): + core = Core() + param = ops.parameter(PartialShape([-1, -1])) + model = Model(ops.relu(param), [param]) + compiled = core.compile_model(model, device) + assert compiled.input().partial_shape.is_dynamic + request = compiled.create_infer_request() + + shape1 = [1, 28] + request.infer([np.random.normal(size=shape1)]) + assert request.get_input_tensor().shape == Shape(shape1) + + shape2 = [1, 32] + request.infer([np.random.normal(size=shape2)]) + assert request.get_input_tensor().shape == Shape(shape2) diff --git a/src/bindings/python/tests/test_inference_engine/test_input_node.py b/src/bindings/python/tests/test_inference_engine/test_input_node.py index 59550de87fc..87b87f36983 100644 --- a/src/bindings/python/tests/test_inference_engine/test_input_node.py +++ b/src/bindings/python/tests/test_inference_engine/test_input_node.py @@ -6,6 +6,8 @@ import os from ..conftest import model_path from openvino.runtime import Input, Shape, PartialShape, Type, Parameter, \ RTMap +from openvino.pyopenvino import DescriptorTensor +import 
openvino.runtime.opset8 as ops from openvino.runtime import Core @@ -90,6 +92,16 @@ def test_input_get_source_output(device): assert name == "fc_out" +def test_input_get_tensor(device): + core = Core() + func = core.read_model(model=test_net_xml, weights=test_net_bin) + exec_net = core.compile_model(func, device) + input = exec_net.output(0) + input_node = input.get_node().inputs()[0] + tensor = input_node.get_tensor() + assert isinstance(tensor, DescriptorTensor) + + def test_input_get_rt_info(device): core = Core() func = core.read_model(model=test_net_xml, weights=test_net_bin) @@ -110,6 +122,20 @@ def test_input_rt_info(device): assert isinstance(rt_info, RTMap) +def test_input_replace_source_output(device): + param = ops.parameter([1, 64], Type.i64) + param.output(0).get_tensor().set_names({"a", "b"}) + + param1 = ops.parameter([1, 64], Type.i64) + param1.output(0).get_tensor().set_names({"c", "d"}) + + relu = ops.relu(param) + relu.input(0).replace_source_output(param1.output(0)) + + assert param.output(0).get_tensor().get_names() == {"a", "b"} + assert param1.output(0).get_tensor().get_names() == {"c", "d"} + + def test_input_update_rt_info(device): core = Core() func = core.read_model(model=test_net_xml, weights=test_net_bin) diff --git a/src/bindings/python/tests/test_inference_engine/test_output_node.py b/src/bindings/python/tests/test_inference_engine/test_output_node.py new file mode 100644 index 00000000000..0d734f5cc27 --- /dev/null +++ b/src/bindings/python/tests/test_inference_engine/test_output_node.py @@ -0,0 +1,36 @@ +# Copyright (C) 2018-2022 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import os + +from ..conftest import model_path +import openvino.runtime.opset8 as ops +from openvino.runtime import Type + +is_myriad = os.environ.get("TEST_DEVICE") == "MYRIAD" +test_net_xml, test_net_bin = model_path(is_myriad) + + +def model_path(is_myriad=False): + path_to_repo = os.environ["MODELS_PATH"] + if not is_myriad: + test_xml = 
os.path.join(path_to_repo, "models", "test_model", "test_model_fp32.xml") + test_bin = os.path.join(path_to_repo, "models", "test_model", "test_model_fp32.bin") + else: + test_xml = os.path.join(path_to_repo, "models", "test_model", "test_model_fp16.xml") + test_bin = os.path.join(path_to_repo, "models", "test_model", "test_model_fp16.bin") + return (test_xml, test_bin) + + +def test_output_replace(device): + param = ops.parameter([1, 64], Type.i64) + param.output(0).get_tensor().set_names({"a", "b"}) + relu = ops.relu(param) + relu.output(0).get_tensor().set_names({"c", "d"}) + + new_relu = ops.relu(param) + new_relu.output(0).get_tensor().set_names({"f"}) + + relu.output(0).replace(new_relu.output(0)) + + assert new_relu.output(0).get_tensor().get_names() == {"c", "d", "f"} diff --git a/src/bindings/python/tests/test_ngraph/test_basic.py b/src/bindings/python/tests/test_ngraph/test_basic.py index 2adae148073..a355bb80bee 100644 --- a/src/bindings/python/tests/test_ngraph/test_basic.py +++ b/src/bindings/python/tests/test_ngraph/test_basic.py @@ -18,17 +18,19 @@ from openvino.runtime import Tensor from openvino.pyopenvino import DescriptorTensor from openvino.runtime.op import Parameter from tests.runtime import get_runtime +from openvino.runtime.utils.types import get_dtype from tests.test_ngraph.util import run_op_node def test_ngraph_function_api(): shape = [2, 2] parameter_a = ops.parameter(shape, dtype=np.float32, name="A") - parameter_b = ops.parameter(shape, dtype=np.float32, name="B") + parameter_b = ops.parameter(shape, dtype=Type.f32, name="B") parameter_c = ops.parameter(shape, dtype=np.float32, name="C") model = (parameter_a + parameter_b) * parameter_c assert parameter_a.element_type == Type.f32 + assert parameter_b.element_type == Type.f32 assert parameter_a.partial_shape == PartialShape([2, 2]) parameter_a.layout = ov.Layout("NC") assert parameter_a.layout == ov.Layout("NC") @@ -74,6 +76,17 @@ def test_ngraph_function_api(): np.uint16, np.uint32, 
np.uint64, + Type.f16, + Type.f32, + Type.f64, + Type.i8, + Type.i16, + Type.i32, + Type.i64, + Type.u8, + Type.u16, + Type.u32, + Type.u64, ], ) def test_simple_computation_on_ndarrays(dtype): @@ -86,17 +99,19 @@ def test_simple_computation_on_ndarrays(dtype): model = (parameter_a + parameter_b) * parameter_c computation = runtime.computation(model, parameter_a, parameter_b, parameter_c) - value_a = np.array([[1, 2], [3, 4]], dtype=dtype) - value_b = np.array([[5, 6], [7, 8]], dtype=dtype) - value_c = np.array([[2, 3], [4, 5]], dtype=dtype) - result = computation(value_a, value_b, value_c) - assert np.allclose(result, np.array([[12, 24], [40, 60]], dtype=dtype)) + np_dtype = get_dtype(dtype) if isinstance(dtype, Type) else dtype - value_a = np.array([[9, 10], [11, 12]], dtype=dtype) - value_b = np.array([[13, 14], [15, 16]], dtype=dtype) - value_c = np.array([[5, 4], [3, 2]], dtype=dtype) + value_a = np.array([[1, 2], [3, 4]], dtype=np_dtype) + value_b = np.array([[5, 6], [7, 8]], dtype=np_dtype) + value_c = np.array([[2, 3], [4, 5]], dtype=np_dtype) result = computation(value_a, value_b, value_c) - assert np.allclose(result, np.array([[110, 96], [78, 56]], dtype=dtype)) + assert np.allclose(result, np.array([[12, 24], [40, 60]], dtype=np_dtype)) + + value_a = np.array([[9, 10], [11, 12]], dtype=np_dtype) + value_b = np.array([[13, 14], [15, 16]], dtype=np_dtype) + value_c = np.array([[5, 4], [3, 2]], dtype=np_dtype) + result = computation(value_a, value_b, value_c) + assert np.allclose(result, np.array([[110, 96], [78, 56]], dtype=np_dtype)) def test_serialization(): diff --git a/src/bindings/python/tests/test_ngraph/test_core.py b/src/bindings/python/tests/test_ngraph/test_core.py index a8919e8900e..f505afe756b 100644 --- a/src/bindings/python/tests/test_ngraph/test_core.py +++ b/src/bindings/python/tests/test_ngraph/test_core.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 import numpy as np +import pytest import openvino.runtime.opset8 as ov from 
openvino.runtime import Dimension, Model, PartialShape, Shape @@ -76,6 +77,33 @@ def test_dimension_comparisons(): assert not d2.compatible(d1) assert not d2.same_scheme(d1) + d = Dimension("?") + assert d == Dimension() + + d = Dimension("1") + assert d == Dimension(1) + + d = Dimension("..10") + assert d == Dimension(-1, 10) + + d = Dimension("10..") + assert d == Dimension(10, -1) + + d = Dimension("5..10") + assert d == Dimension(5, 10) + + with pytest.raises(RuntimeError) as e: + d = Dimension("C") + assert 'Cannot parse dimension: "C"' in str(e.value) + + with pytest.raises(RuntimeError) as e: + d = Dimension("?..5") + assert 'Cannot parse min bound: "?"' in str(e.value) + + with pytest.raises(RuntimeError) as e: + d = Dimension("5..?") + assert 'Cannot parse max bound: "?"' in str(e.value) + def test_partial_shape(): ps = PartialShape([1, 2, 3, 4]) @@ -140,6 +168,40 @@ def test_partial_shape(): assert list(ps.get_max_shape())[0] > 1000000000 assert repr(ps) == "" + shape_list = [(1, 10), [2, 5], 4, Dimension(2), "..10"] + ref_ps = PartialShape([Dimension(1, 10), Dimension(2, 5), Dimension(4), Dimension(2), Dimension(-1, 10)]) + assert PartialShape(shape_list) == ref_ps + assert PartialShape(tuple(shape_list)) == ref_ps + + with pytest.raises(TypeError) as e: + PartialShape([(1, 2, 3)]) + assert "Two elements are expected in tuple(lower, upper) " \ + "for dynamic dimension, but 3 elements were given." in str(e.value) + + with pytest.raises(TypeError) as e: + PartialShape([("?", "?")]) + assert "Incorrect pair of types (, ) " \ + "for dynamic dimension, ints are expected." in str(e.value) + + with pytest.raises(TypeError) as e: + PartialShape([range(10)]) + assert "Incorrect type for dimension. Expected types are: " \ + "int, str, openvino.runtime.Dimension, list/tuple with lower " \ + "and upper values for dynamic dimension." 
in str(e.value) + + ps = PartialShape("...") + assert ps == PartialShape.dynamic() + + ps = PartialShape("?, 3, ..224, 28..224") + assert ps == PartialShape([Dimension(-1), Dimension(3), Dimension(-1, 224), Dimension(28, 224)]) + + with pytest.raises(RuntimeError) as e: + ps = PartialShape("?,,3") + assert 'Cannot get vector of dimensions! "?,,3" is incorrect' in str(e.value) + + shape = Shape() + assert len(shape) == 0 + def test_partial_shape_compatible(): ps1 = PartialShape.dynamic() diff --git a/src/bindings/python/tests/test_ngraph/test_einsum.py b/src/bindings/python/tests/test_ngraph/test_einsum.py index 2ace1e58079..146969282ca 100644 --- a/src/bindings/python/tests/test_ngraph/test_einsum.py +++ b/src/bindings/python/tests/test_ngraph/test_einsum.py @@ -34,7 +34,7 @@ def einsum_op_exec(input_shapes: list, equation: str, data_type: np.dtype, ng_inputs = [] np_inputs = [] for i in range(num_inputs): - input_i = np.random.random_integers(10, size=input_shapes[i]).astype(data_type) + input_i = np.random.randint(1, 10 + 1, size=input_shapes[i]).astype(data_type) np_inputs.append(input_i) ng_inputs.append(ov.parameter(input_i.shape, dtype=data_type)) diff --git a/src/bindings/python/tests/test_ngraph/test_ops.py b/src/bindings/python/tests/test_ngraph/test_ops.py index 1871f9c8730..c9eaff88ff7 100644 --- a/src/bindings/python/tests/test_ngraph/test_ops.py +++ b/src/bindings/python/tests/test_ngraph/test_ops.py @@ -482,6 +482,30 @@ def test_constant(): assert np.allclose(result, expected) +def test_constant_opset_ov_type(): + parameter_list = [] + function = Model([ov.constant(np.arange(9).reshape(3, 3), Type.f32)], parameter_list, "test") + + runtime = get_runtime() + computation = runtime.computation(function, *parameter_list) + result = computation()[0] + + expected = np.arange(9).reshape(3, 3) + assert np.allclose(result, expected) + + +def test_constant_opset_numpy_type(): + parameter_list = [] + function = Model([ov.constant(np.arange(9).reshape(3, 3), 
np.float32)], parameter_list, "test") + + runtime = get_runtime() + computation = runtime.computation(function, *parameter_list) + result = computation()[0] + + expected = np.arange(9).reshape(3, 3) + assert np.allclose(result, expected) + + def test_concat(): element_type = Type.f32 diff --git a/src/bindings/python/tests/test_ngraph/test_ops_fused.py b/src/bindings/python/tests/test_ngraph/test_ops_fused.py index 82a202ef358..5e9c122ebe5 100644 --- a/src/bindings/python/tests/test_ngraph/test_ops_fused.py +++ b/src/bindings/python/tests/test_ngraph/test_ops_fused.py @@ -48,7 +48,7 @@ def test_fake_quantize(): input_high_value = np.float32(23) output_low_value = np.float32(2) output_high_value = np.float32(16) - levels = np.float32(4) + levels = np.int32(4) data_shape = [1, 2, 3, 4] bound_shape = [] @@ -114,7 +114,7 @@ def test_depth_to_space(): dtype=np.float32, ) mode = "blocks_first" - block_size = np.float32(2) + block_size = np.int32(2) data_shape = [1, 4, 2, 3] parameter_data = ov.parameter(data_shape, name="Data", dtype=np.float32) diff --git a/src/bindings/python/tests/test_ngraph/test_preprocess.py b/src/bindings/python/tests/test_ngraph/test_preprocess.py index bf0741cbfdb..4a4a490160e 100644 --- a/src/bindings/python/tests/test_ngraph/test_preprocess.py +++ b/src/bindings/python/tests/test_ngraph/test_preprocess.py @@ -90,7 +90,7 @@ def test_ngraph_preprocess_mean_scale_convert(): p = PrePostProcessor(function) inp2 = p.input(1) inp2.tensor().set_element_type(Type.i32) - inp2.preprocess().convert_element_type(Type.f32).mean(1.).scale(2.) 
+ inp2.preprocess().convert_element_type(Type.f32).mean(1.).scale(2.).convert_element_type() inp1 = p.input(0) inp1.preprocess().convert_element_type(Type.f32).mean(1.).custom(custom_preprocess) function = p.build() @@ -159,10 +159,14 @@ def test_ngraph_preprocess_output_postprocess(): inp.tensor().set_layout(layout1) inp.preprocess().convert_element_type(Type.f32).mean([1., 2., 3.]) out = p.output() + out.tensor().set_element_type(Type.f32) out.model().set_layout(layout1) out.postprocess().convert_element_type(Type.f32) \ .convert_layout(layout2) \ - .convert_layout(layout3).custom(custom_postprocess) + .convert_layout(layout3) \ + .custom(custom_postprocess) \ + .convert_element_type(Type.f16) \ + .convert_element_type() function = p.build() input_data = np.array([[-1, -2, -3], [-4, -5, -6]]).astype(np.int32) @@ -185,7 +189,7 @@ def test_ngraph_preprocess_spatial_static_shape(): p = PrePostProcessor(function) inp = p.input() - inp.tensor().set_layout(layout).set_spatial_static_shape(2, 2).set_color_format(color_format, []) + inp.tensor().set_layout(layout).set_spatial_static_shape(2, 2).set_color_format(color_format) inp.preprocess().convert_element_type(Type.f32).mean([1., 2.]) inp.model().set_layout(layout) out = p.output() @@ -233,6 +237,60 @@ def test_ngraph_preprocess_set_shape(): assert np.equal(output, expected_output).all() +def test_ngraph_preprocess_set_from_tensor(): + shape = [1, 224, 224, 3] + inp_shape = [1, 480, 640, 3] + parameter_a = ops.parameter(shape, dtype=np.float32, name="A") + parameter_a.set_layout(ov.Layout("NHWC")) + model = parameter_a + function = Model(model, [parameter_a], "TestFunction") + + input_data = ov.Tensor(Type.i32, inp_shape) + p = PrePostProcessor(function) + inp = p.input() + inp.tensor().set_from(input_data) + inp.preprocess().resize(ResizeAlgorithm.RESIZE_LINEAR) + function = p.build() + assert function.input().shape == ov.Shape(inp_shape) + assert function.input().element_type == Type.i32 + assert 
function.output().shape == ov.Shape(shape) + assert function.output().element_type == Type.f32 + + +def test_ngraph_preprocess_set_from_np_infer(): + shape = [1, 1, 1] + parameter_a = ops.parameter(shape, dtype=np.float32, name="A") + model = parameter_a + function = Model(model, [parameter_a], "TestFunction") + + @custom_preprocess_function + def custom_crop(out_node: Output): + start = ops.constant(np.array([1, 1, 1]), dtype=np.int32) + stop = ops.constant(np.array([2, 2, 2]), dtype=np.int32) + step = ops.constant(np.array([1, 1, 1]), dtype=np.int32) + axis = ops.constant(np.array([0, 1, 2]), dtype=np.int32) + return ops.slice(out_node, start, stop, step, axis) + + input_data = np.array([[[0, 1, 2], [3, 4, 5], [6, 7, 8]], + [[9, 10, 11], [12, 13, 14], [15, 16, 17]], + [[18, 19, 20], [21, 22, 23], [24, 25, 26]]]).astype(np.int32) + + p = PrePostProcessor(function) + inp = p.input() + inp.tensor().set_from(input_data) + inp.preprocess().convert_element_type().custom(custom_crop) + function = p.build() + assert function.input().shape == ov.Shape([3, 3, 3]) + assert function.input().element_type == Type.i32 + + expected_output = np.array([[[13]]]).astype(np.float32) + + runtime = get_runtime() + computation = runtime.computation(function) + output = computation(input_data) + assert np.equal(output, expected_output).all() + + def test_ngraph_preprocess_set_memory_type(): shape = [1, 1, 1] parameter_a = ops.parameter(shape, dtype=np.int32, name="A") @@ -342,6 +400,27 @@ def test_ngraph_preprocess_reverse_channels(): assert np.equal(output, expected_output).all() +def test_ngraph_preprocess_crop(): + orig_shape = [1, 2, 1, 1] + tensor_shape = [1, 2, 3, 3] + parameter_a = ops.parameter(orig_shape, dtype=np.float32, name="A") + model = ops.relu(parameter_a) + function = Model(model, [parameter_a], "TestFunction") + + p = PrePostProcessor(function) + p.input().tensor().set_shape(tensor_shape) + p.input().preprocess().crop([0, 0, 1, 1], [1, 2, -1, -1]) + function = 
p.build() + + input_data = np.arange(18).astype(np.float32).reshape(tensor_shape) + expected_output = np.array([4, 13]).astype(np.float32).reshape(orig_shape) + + runtime = get_runtime() + computation = runtime.computation(function) + output = computation(input_data) + assert np.equal(output, expected_output).all() + + def test_ngraph_preprocess_resize_algorithm(): shape = [1, 1, 3, 3] parameter_a = ops.parameter(shape, dtype=np.float32, name="A") diff --git a/src/bindings/python/tests/test_ngraph/test_utils.py b/src/bindings/python/tests/test_ngraph/test_utils.py index a1e62eca3af..07d31ffc5f2 100644 --- a/src/bindings/python/tests/test_ngraph/test_utils.py +++ b/src/bindings/python/tests/test_ngraph/test_utils.py @@ -12,7 +12,7 @@ def test_get_constant_from_source_success(): input2 = ov.opset8.parameter(Shape([25]), dtype=dtype, name="input_2") shape_of = ov.opset8.shape_of(input2, name="shape_of") reshape = ov.opset8.reshape(input1, shape_of, special_zero=True) - folded_const = ov.util.get_constant_from_source(reshape.input(1).get_source_output()) + folded_const = ov.utils.get_constant_from_source(reshape.input(1).get_source_output()) assert folded_const is not None assert folded_const.get_vector() == [25] @@ -23,6 +23,6 @@ def test_get_constant_from_source_failed(): input1 = ov.opset8.parameter(Shape([5, 5]), dtype=dtype, name="input_1") input2 = ov.opset8.parameter(Shape([1]), dtype=dtype, name="input_2") reshape = ov.opset8.reshape(input1, input2, special_zero=True) - folded_const = ov.util.get_constant_from_source(reshape.input(1).get_source_output()) + folded_const = ov.utils.get_constant_from_source(reshape.input(1).get_source_output()) assert folded_const is None diff --git a/src/bindings/python/tests/test_onnx/test_ops_unary.py b/src/bindings/python/tests/test_onnx/test_ops_unary.py index ae79735a96a..6ecc0035d6a 100644 --- a/src/bindings/python/tests/test_onnx/test_ops_unary.py +++ b/src/bindings/python/tests/test_onnx/test_ops_unary.py @@ -7,7 +7,7 @@ 
import onnx.mapping import pytest from onnx.helper import make_graph, make_model, make_node, make_tensor_value_info -from openvino.runtime.exceptions import NgraphTypeError +from openvino.runtime.exceptions import OVTypeError from tests.runtime import get_runtime from tests.test_onnx.utils import get_node_model, import_onnx_model, run_model, run_node @@ -425,7 +425,7 @@ def test_cast_errors(): graph = make_graph([node], "compute_graph", input_tensors, output_tensors) model = make_model(graph, producer_name="NgraphBackend") - with pytest.raises((RuntimeError, NgraphTypeError)): + with pytest.raises((RuntimeError, OVTypeError)): import_onnx_model(model) # unsupported output tensor data type: diff --git a/src/bindings/python/tests_compatibility/test_inference_engine/test_IENetwork.py b/src/bindings/python/tests_compatibility/test_inference_engine/test_IENetwork.py index c9520f6bc48..a861d6d20b9 100644 --- a/src/bindings/python/tests_compatibility/test_inference_engine/test_IENetwork.py +++ b/src/bindings/python/tests_compatibility/test_inference_engine/test_IENetwork.py @@ -135,17 +135,21 @@ def test_batch_size_after_reshape(): def test_serialize(): - ie = IECore() - net = ie.read_network(model=test_net_xml, weights=test_net_bin) - net.serialize("./serialized_net.xml", "./serialized_net.bin") - serialized_net = ie.read_network(model="./serialized_net.xml", weights="./serialized_net.bin") - func_net = ng.function_from_cnn(net) - ops_net = func_net.get_ordered_ops() - ops_net_names = [op.friendly_name for op in ops_net] - func_serialized_net = ng.function_from_cnn(serialized_net) - ops_serialized_net = func_serialized_net.get_ordered_ops() - ops_serialized_net_names = [op.friendly_name for op in ops_serialized_net] - assert ops_serialized_net_names == ops_net_names + def run(): + ie = IECore() + net = ie.read_network(model=test_net_xml, weights=test_net_bin) + net.serialize("./serialized_net.xml", "./serialized_net.bin") + serialized_net = 
ie.read_network(model="./serialized_net.xml", weights="./serialized_net.bin") + func_net = ng.function_from_cnn(net) + ops_net = func_net.get_ordered_ops() + ops_net_names = [op.friendly_name for op in ops_net] + func_serialized_net = ng.function_from_cnn(serialized_net) + ops_serialized_net = func_serialized_net.get_ordered_ops() + ops_serialized_net_names = [op.friendly_name for op in ops_serialized_net] + assert ops_serialized_net_names == ops_net_names + + run() + # xml/bin files shall not be acquired after by 'net' here, can be removed os.remove("./serialized_net.xml") os.remove("./serialized_net.bin") diff --git a/src/bindings/python/wheel/setup.py b/src/bindings/python/wheel/setup.py index 7e3b41cfdab..e3ff90622fb 100644 --- a/src/bindings/python/wheel/setup.py +++ b/src/bindings/python/wheel/setup.py @@ -27,7 +27,7 @@ WHEEL_LIBS_PACKAGE = 'openvino.libs' PYTHON_VERSION = f'python{sys.version_info.major}.{sys.version_info.minor}' LIBS_DIR = 'bin' if platform.system() == 'Windows' else 'lib' -CONFIG = 'Release' if platform.system() == 'Windows' else '' +CONFIG = 'Release' if platform.system() in {'Windows' , 'Darwin'} else '' machine = platform.machine() if machine == 'x86_64' or machine == 'AMD64': @@ -428,6 +428,15 @@ def get_package_dir(install_cfg): return py_package_path +def concat_files(output_file, input_files): + with open(output_file, 'w', encoding='utf-8') as outfile: + for filename in input_files: + with open(filename, 'r', encoding='utf-8') as infile: + content = infile.read() + outfile.write(content) + return output_file + + platforms = ['linux', 'win32', 'darwin'] if not any(pl in sys.platform for pl in platforms): sys.exit(f'Unsupported platform: {sys.platform}, expected: linux, win32, darwin') @@ -442,6 +451,17 @@ package_data: typing.Dict[str, list] = {} pkg_name = os.getenv('WHEEL_PACKAGE_NAME', 'openvino') ext_modules = find_prebuilt_extensions(get_dir_list(PY_INSTALL_CFG)) if pkg_name == 'openvino' else [] +description_md = 
SCRIPT_DIR.parents[3] / 'docs' / 'install_guides' / 'pypi-openvino-rt.md' +md_files = [description_md, SCRIPT_DIR.parents[3] / 'docs' / 'install_guides' / 'pre-release-note.md'] +docs_url = 'https://docs.openvino.ai/latest/index.html' + +if(os.getenv('CI_BUILD_DEV_TAG')): + output = Path.cwd() / 'build' / 'pypi-openvino-rt.md' + output.parent.mkdir(exist_ok=True) + description_md = concat_files(output, md_files) + docs_url = 'https://docs.openvino.ai/nightly/index.html' + + setup( version=os.getenv('WHEEL_VERSION', '0.0.0'), build=os.getenv('WHEEL_BUILD', '000'), @@ -451,10 +471,10 @@ setup( author=os.getenv('WHEEL_AUTHOR', 'Intel(R) Corporation'), description=os.getenv('WHEEL_DESC', 'OpenVINO(TM) Runtime'), install_requires=get_dependencies(os.getenv('WHEEL_REQUIREMENTS', SCRIPT_DIR.parents[0] / 'requirements.txt')), - long_description=get_description(os.getenv('WHEEL_OVERVIEW', SCRIPT_DIR.parents[3] / 'docs/install_guides/pypi-openvino-rt.md')), + long_description=get_description(os.getenv('WHEEL_OVERVIEW', description_md)), long_description_content_type='text/markdown', download_url=os.getenv('WHEEL_DOWNLOAD_URL', 'https://github.com/openvinotoolkit/openvino/tags'), - url=os.getenv('WHEEL_URL', 'https://docs.openvino.ai/latest/index.html'), + url=os.getenv('WHEEL_URL', docs_url), cmdclass={ 'build': CustomBuild, 'install': CustomInstall, diff --git a/src/common/conditional_compilation/CMakeLists.txt b/src/common/conditional_compilation/CMakeLists.txt index 20b2532a77c..f99eb92a90b 100644 --- a/src/common/conditional_compilation/CMakeLists.txt +++ b/src/common/conditional_compilation/CMakeLists.txt @@ -49,5 +49,5 @@ endif() ov_install_static_lib(${TARGET_NAME} core) file(GLOB_RECURSE hdrs ${CMAKE_CURRENT_SOURCE_DIR}/include/*.h ${CMAKE_CURRENT_SOURCE_DIR}/include/*.hpp) -add_cpplint_target(${TARGET_NAME}_cpplint FOR_SOURCES ${hdrs}) +add_clang_format_target(${TARGET_NAME}_clang FOR_SOURCES ${hdrs}) openvino_developer_export_targets(COMPONENT core TARGETS 
openvino::conditional_compilation) diff --git a/src/common/conditional_compilation/include/openvino/cc/factory.h b/src/common/conditional_compilation/include/openvino/cc/factory.h index 822098aa834..532c6916f4e 100644 --- a/src/common/conditional_compilation/include/openvino/cc/factory.h +++ b/src/common/conditional_compilation/include/openvino/cc/factory.h @@ -3,19 +3,20 @@ // #pragma once -#include "selective_build.h" -#include #include +#include #include #include +#include "selective_build.h" + namespace openvino { namespace cc { -template +template class Factory; -template +template class Factory { Factory(Factory const&) = delete; Factory& operator=(Factory const&) = delete; @@ -23,27 +24,25 @@ class Factory { public: using builder_t = std::function; - Factory(const std::string & name) - : name(name) {} + Factory(const std::string& name) : name(name) {} #ifdef SELECTIVE_BUILD - #define registerNodeIfRequired(Module, Name, key, Impl) \ - OV_PP_EXPAND(OV_PP_CAT(registerImpl, OV_CC_SCOPE_IS_ENABLED(OV_PP_CAT3(Module, _, Name)))(key)) - #define createNodeIfRegistered(Module, key, ...) createImpl(key, __VA_ARGS__) +# define registerNodeIfRequired(Module, Name, key, Impl) \ + OV_PP_EXPAND(OV_PP_CAT(registerImpl, OV_CC_SCOPE_IS_ENABLED(OV_PP_CAT3(Module, _, Name))) < Impl > (key)) +# define createNodeIfRegistered(Module, key, ...) createImpl(key, __VA_ARGS__) - template - void registerImpl0(const Key &) { - } + template + void registerImpl0(const Key&) {} - template - void registerImpl1(const Key & key) { + template + void registerImpl1(const Key& key) { builders[key] = [](Args... args) -> T { - Impl *impl = new Impl(args...); + Impl* impl = new Impl(args...); return static_cast(impl); }; } - T createImpl(const Key & key, Args... args) { + T createImpl(const Key& key, Args... 
args) { auto builder = builders.find(key); if (builder != builders.end()) { return builder->second(args...); @@ -52,21 +51,22 @@ public: } #elif defined(SELECTIVE_BUILD_ANALYZER) - #define registerNodeIfRequired(Module, Name, key, Impl) registerImpl(key, OV_PP_TOSTRING(Name)) - #define createNodeIfRegistered(Module, key, ...) createImpl(key, __VA_ARGS__) +# define registerNodeIfRequired(Module, Name, key, Impl) \ + registerImpl(key, OV_PP_TOSTRING(Name)) +# define createNodeIfRegistered(Module, key, ...) createImpl(key, __VA_ARGS__) - template - void registerImpl(const Key & key, const char *typeName) { + template + void registerImpl(const Key& key, const char* typeName) { const std::string task_name = "REG$" + name + "$" + to_string(key) + "$" + typeName; openvino::itt::ScopedTask task(openvino::itt::handle(task_name)); builders[key] = [](Args... args) -> T { - Impl *impl = new Impl(args...); + Impl* impl = new Impl(args...); return static_cast(impl); }; } - template - T createImpl(const Key & key, Args... args) { + template + T createImpl(const Key& key, Args... args) { auto builder = builders.find(key); if (builder != builders.end()) { const std::string task_name = "CREATE$" + name + "$" + to_string(key); @@ -78,18 +78,18 @@ public: #else - #define registerNodeIfRequired(Module, Name, key, Impl) registerImpl(key) - #define createNodeIfRegistered(Module, key, ...) createImpl(key, __VA_ARGS__) +# define registerNodeIfRequired(Module, Name, key, Impl) registerImpl(key) +# define createNodeIfRegistered(Module, key, ...) createImpl(key, __VA_ARGS__) - template - void registerImpl(const Key & key) { + template + void registerImpl(const Key& key) { builders[key] = [](Args... args) -> T { - Impl *impl = new Impl(args...); + Impl* impl = new Impl(args...); return static_cast(impl); }; } - T createImpl(const Key & key, Args... args) { + T createImpl(const Key& key, Args... 
args) { auto builder = builders.find(key); if (builder != builders.end()) { return builder->second(args...); @@ -98,8 +98,8 @@ public: } #endif - template - void foreach(Fn fn) const { + template + void foreach (Fn fn) const { for (auto itm : builders) fn(itm); } @@ -109,18 +109,16 @@ public: } private: - const std::string & to_string(const std::string & str) const noexcept { + const std::string& to_string(const std::string& str) const noexcept { return str; } - template::value, bool>::type = true> + template ::value, bool>::type = true> std::string to_string(V val) const { return std::to_string(static_cast(val)); } - template::value, bool>::type = true> + template ::value, bool>::type = true> std::string to_string(V val) const { return std::to_string(val); } diff --git a/src/common/conditional_compilation/include/openvino/cc/ngraph/itt.hpp b/src/common/conditional_compilation/include/openvino/cc/ngraph/itt.hpp index 710020e6069..9c60ca4d200 100644 --- a/src/common/conditional_compilation/include/openvino/cc/ngraph/itt.hpp +++ b/src/common/conditional_compilation/include/openvino/cc/ngraph/itt.hpp @@ -4,36 +4,4 @@ #pragma once -#include -#include - -OV_CC_DOMAINS(ngraph_pass); - -/* - * RUN_ON_FUNCTION_SCOPE macro allows to disable the run_on_function pass - * MATCHER_SCOPE macro allows to disable the MatcherPass if matcher isn't applied - */ -#if defined(SELECTIVE_BUILD_ANALYZER) - -#define RUN_ON_FUNCTION_SCOPE(region) OV_SCOPE(ngraph_pass, OV_PP_CAT(region, _run_on_function)) -#define MATCHER_SCOPE(region) const std::string matcher_name(OV_PP_TOSTRING(region)) - -#elif defined(SELECTIVE_BUILD) - -#define MATCHER_SCOPE_(scope, region) \ - if (OV_CC_SCOPE_IS_ENABLED(OV_PP_CAT3(scope, _, region)) == 0) \ - throw ngraph::ngraph_error(std::string(OV_PP_TOSTRING(OV_PP_CAT3(scope, _, region))) + \ - " is disabled!") - -#define MATCHER_SCOPE(region) \ - const std::string matcher_name(OV_PP_TOSTRING(region)); \ - if (OV_CC_SCOPE_IS_ENABLED(OV_PP_CAT3(ngraph_pass, _, 
region)) == 0) \ - return -#define RUN_ON_FUNCTION_SCOPE(region) \ - MATCHER_SCOPE_(ngraph_pass, OV_PP_CAT(region, _run_on_function)) - -#else - -#define MATCHER_SCOPE(region) const std::string matcher_name(OV_PP_TOSTRING(region)) -#define RUN_ON_FUNCTION_SCOPE(region) -#endif +#include diff --git a/src/common/conditional_compilation/include/openvino/cc/pass/itt.hpp b/src/common/conditional_compilation/include/openvino/cc/pass/itt.hpp new file mode 100644 index 00000000000..4305459b4c0 --- /dev/null +++ b/src/common/conditional_compilation/include/openvino/cc/pass/itt.hpp @@ -0,0 +1,43 @@ +// Copyright (C) 2018-2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include + +#include + +OV_CC_DOMAINS(ov_pass); + +/* + * RUN_ON_MODEL_SCOPE macro allows to disable the run_on_function pass + * RUN_ON_FUNCTION_SCOPE macro allows to disable the run_on_function pass + * MATCHER_SCOPE macro allows to disable the MatcherPass if matcher isn't applied + */ +#if defined(SELECTIVE_BUILD_ANALYZER) + +# define RUN_ON_FUNCTION_SCOPE(region) OV_SCOPE(ov_pass, OV_PP_CAT(region, _run_on_function)) +# define MATCHER_SCOPE(region) const std::string matcher_name(OV_PP_TOSTRING(region)) +# define RUN_ON_MODEL_SCOPE(region) OV_SCOPE(ov_pass, OV_PP_CAT(region, _run_on_model)) + +#elif defined(SELECTIVE_BUILD) + +# define MATCHER_SCOPE_(scope, region) \ + if (OV_CC_SCOPE_IS_ENABLED(OV_PP_CAT3(scope, _, region)) == 0) \ + throw ngraph::ngraph_error(std::string(OV_PP_TOSTRING(OV_PP_CAT3(scope, _, region))) + " is disabled!") + +# define MATCHER_SCOPE(region) \ + const std::string matcher_name(OV_PP_TOSTRING(region)); \ + if (OV_CC_SCOPE_IS_ENABLED(OV_PP_CAT3(ov_pass, _, region)) == 0) \ + return +# define RUN_ON_FUNCTION_SCOPE(region) MATCHER_SCOPE_(ov_pass, OV_PP_CAT(region, _run_on_function)) + +# define RUN_ON_MODEL_SCOPE(region) MATCHER_SCOPE_(ov_pass, OV_PP_CAT(region, _run_on_model)) + +#else + +# define MATCHER_SCOPE(region) const std::string 
matcher_name(OV_PP_TOSTRING(region)) +# define RUN_ON_FUNCTION_SCOPE(region) +# define RUN_ON_MODEL_SCOPE(region) +#endif diff --git a/src/common/conditional_compilation/include/openvino/cc/selective_build.h b/src/common/conditional_compilation/include/openvino/cc/selective_build.h index 061068174f3..720c9ee1e39 100644 --- a/src/common/conditional_compilation/include/openvino/cc/selective_build.h +++ b/src/common/conditional_compilation/include/openvino/cc/selective_build.h @@ -55,19 +55,19 @@ * */ -#include #include +#include -#define OV_CC_EXPAND OV_PP_EXPAND -#define OV_CC_CAT OV_PP_CAT +#define OV_CC_EXPAND OV_PP_EXPAND +#define OV_CC_CAT OV_PP_CAT #define OV_CC_TOSTRING OV_PP_TOSTRING #ifdef SELECTIVE_BUILD_ANALYZER -# include +# include #endif -#include #include +#include namespace openvino { namespace cc { @@ -76,31 +76,29 @@ namespace cc { namespace internal { -template +template struct case_wrapper { using type = T; - const C value {}; + const C value{}; - case_wrapper(C && val) - : value(std::forward(val)) - {} + case_wrapper(C&& val) : value(std::forward(val)) {} }; -template -case_wrapper make_case_wrapper(C && val) { +template +case_wrapper make_case_wrapper(C&& val) { return case_wrapper(std::forward(val)); } -template class Fn, typename Ctx, typename T, typename Case> -bool match(Ctx&& ctx, T&& val, Case && cs) { +template