diff --git a/docs/conf.py b/docs/conf.py index be9dc1caed1..97446abbd48 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -164,12 +164,18 @@ def autodoc_skip_member(app, what, name, obj, skip, options): return name in exclude_pyapi_methods +shutil.copy("../../../docs/home.rst",".") + +def replace_index_with_redirect(app,exception): + shutil.copy("../../../docs/index.html","../_build/index.html") + def setup(app): logger = logging.getLogger(__name__) app.add_config_value('doxygen_mapping_file', doxygen_mapping_file, rebuild=True) app.add_config_value('repositories', repositories, rebuild=True) app.connect('autodoc-skip-member', autodoc_skip_member) + app.connect('build-finished',replace_index_with_redirect) app.add_js_file('js/custom.js') app.add_js_file('js/graphs.js') app.add_js_file('js/graphs_ov_tf.js') diff --git a/docs/home.rst b/docs/home.rst new file mode 100644 index 00000000000..d25f9583203 --- /dev/null +++ b/docs/home.rst @@ -0,0 +1,125 @@ +.. OpenVINO Toolkit documentation master file, created by + sphinx-quickstart on Wed Jul 7 10:46:56 2021. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. meta:: + :google-site-verification: _YqumYQ98cmXUTwtzM_0WIIadtDc6r_TMYGbmGgNvrk + +OpenVINO™ Documentation +======================= + +.. raw:: html + +
+ + +
+ +

+ OpenVINO™ is an open-source toolkit for optimizing and deploying AI inference. +

+

+ 

 + OpenVINO allows you to process models built with Caffe, Keras, mxnet, TensorFlow, ONNX, and PyTorch. They can be easily optimized and deployed on devices running Windows, Linux, or macOS. 
+

Check the full range of supported hardware in the + Supported Devices page and see how it stacks up in our + Performance Benchmarks page.
+ Supports deployment on Windows, Linux, and macOS. +

+ +
+

OpenVINO Workflow

+
+
+ +
+
+ + link to model processing introduction + +
+
+ +
+
+ + link to an optimization guide + +
+
+ +
+
+ + link to deployment introduction + +
+
+ + +
+
+

Want to know more?

+
+
+ +
+ +

Get Started

+

Learn how to download, install, and configure OpenVINO.

+
+ +

Open Model Zoo

+

Browse through over 200 publicly available neural networks and pick the right one for your solution.

+
+ +

Model Optimizer

+

Learn how to convert your model and optimize it for use with OpenVINO.

+
+ +

Tutorials

+

Learn how to use OpenVINO based on our training material.

+
+ +

Samples

+

Try OpenVINO using ready-made applications explaining various use cases.

+
+ +

DL Workbench

+


 Learn about the alternative, web-based version of OpenVINO. DL Workbench container installation required.


+
+ +

OpenVINO™ Runtime

+


 Learn about OpenVINO's inference mechanism which executes IR, ONNX, and Paddle models on target devices.


+
+ +

Tune & Optimize

+


 Model-level (e.g. quantization) and runtime-level (i.e. application) optimizations to make your inference as fast as possible.


+
+ +

Performance
Benchmarks

+

View performance benchmark results for various models on Intel platforms.

+
+
+
+
+ + + + +.. toctree:: + :maxdepth: 2 + :hidden: + + get_started + documentation + tutorials + api/api_reference + model_zoo + resources diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 00000000000..2e615a41d77 --- /dev/null +++ b/docs/index.html @@ -0,0 +1 @@ + diff --git a/docs/index.rst b/docs/index.rst index d25f9583203..1425c26bb26 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,125 +1,6 @@ -.. OpenVINO Toolkit documentation master file, created by - sphinx-quickstart on Wed Jul 7 10:46:56 2021. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -.. meta:: - :google-site-verification: _YqumYQ98cmXUTwtzM_0WIIadtDc6r_TMYGbmGgNvrk - OpenVINO™ Documentation ======================= -.. raw:: html - -
- - -
- -

- OpenVINO™ is an open-source toolkit for optimizing and deploying AI inference. -

-

- - OpenVINO allows to process models built with Caffe, Keras, mxnet, TensorFlow, ONNX, and PyTorch. They can be easily optimized and deployed on devices running Windows, Linux, or MacOS. -
-

Check the full range of supported hardware in the - Supported Devices page and see how it stacks up in our - Performance Benchmarks page.
- Supports deployment on Windows, Linux, and macOS. -

- -
-

OpenVINO Workflow

-
-
- -
-
- - link to model processing introduction - -
-
- -
-
- - link to an optimization guide - -
-
- -
-
- - link to deployment introduction - -
-
- - -
-
-

Want to know more?

-
-
- -
- -

Get Started

-

Learn how to download, install, and configure OpenVINO.

-
- -

Open Model Zoo

-

Browse through over 200 publicly available neural networks and pick the right one for your solution.

-
- -

Model Optimizer

-

Learn how to convert your model and optimize it for use with OpenVINO.

-
- -

Tutorials

-

Learn how to use OpenVINO based on our training material.

-
- -

Samples

-

Try OpenVINO using ready-made applications explaining various use cases.

-
- -

DL Workbench

-

Learn about the alternative, web-based version of OpenVINO. DL Workbench container installation Required.

-
- -

OpenVINO™ Runtime

-

Learn about OpenVINO's inference mechanism which executes the IR, ONNX, Paddle models on target devices.

-
- -

Tune & Optimize

-

Model-level (e.g. quantization) and Runtime (i.e. application) -level optimizations to make your inference as fast as possible.

-
- -

Performance
Benchmarks

-

View performance benchmark results for various models on Intel platforms.

-
-
-
-
- - - - .. toctree:: - :maxdepth: 2 - :hidden: - - get_started - documentation - tutorials - api/api_reference - model_zoo - resources + + home