Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Merge branch 'master' into feature-autosummary-packages

Commit 1a67b87910

CHANGES (90 lines changed)
@@ -32,11 +32,15 @@ Incompatible changes

* Ignore filenames without file extension given to ``Builder.build_specific()``
  API directly
* #6230: The anchor of a term in the glossary directive is changed if the term
  consists of non-ASCII characters
* #4550: html: Centering tables by default using CSS

Deprecated
----------

* ``sphinx.builders.latex.LaTeXBuilder.apply_transforms()``
* ``sphinx.builders._epub_base.EpubBuilder.esc()``
* ``sphinx.directives.Acks``
* ``sphinx.directives.Author``
* ``sphinx.directives.Centered``

@@ -54,11 +58,24 @@ Deprecated

* ``sphinx.directives.TabularColumns``
* ``sphinx.directives.TocTree``
* ``sphinx.directives.VersionChange``
* ``sphinx.domains.python.PyClassmember``
* ``sphinx.domains.python.PyModulelevel``
* ``sphinx.domains.std.StandardDomain._resolve_citation_xref()``
* ``sphinx.domains.std.StandardDomain.note_citations()``
* ``sphinx.domains.std.StandardDomain.note_citation_refs()``
* ``sphinx.domains.std.StandardDomain.note_labels()``
* ``sphinx.environment.NoUri``
* ``sphinx.ext.apidoc.format_directive()``
* ``sphinx.ext.apidoc.format_heading()``
* ``sphinx.ext.autodoc.importer.MockFinder``
* ``sphinx.ext.autodoc.importer.MockLoader``
* ``sphinx.ext.autodoc.importer.mock()``
* ``sphinx.ext.autosummary.autolink_role()``
* ``sphinx.ext.imgmath.DOC_BODY``
* ``sphinx.ext.imgmath.DOC_BODY_PREVIEW``
* ``sphinx.ext.imgmath.DOC_HEAD``
* ``sphinx.transforms.CitationReferences``
* ``sphinx.transforms.SmartQuotesSkipper``
* ``sphinx.util.docfields.DocFieldTransformer.preprocess_fieldtypes()``
* ``sphinx.util.node.find_source_node()``
* ``sphinx.util.i18n.find_catalog()``

@@ -71,39 +88,78 @@ Features added
--------------

* Add a helper class ``sphinx.transforms.post_transforms.SphinxPostTransform``
* Add a helper method ``SphinxDirective.set_source_info()``
* Add helper methods

  - ``PythonDomain.note_module()``
  - ``PythonDomain.note_object()``
  - ``SphinxDirective.set_source_info()``

* #6180: Support ``--keep-going`` with BuildDoc setup command
* ``math`` directive now supports ``:class:`` option
* #6310: imgmath: let :confval:`imgmath_use_preview` work also with the SVG
  format for images rendering inline math
* todo: ``todo`` directive now supports ``:name:`` option
* Enable override via environment of ``SPHINXOPTS`` and ``SPHINXBUILD`` Makefile
  variables (refs: #6232, #6303)
* #6287: autodoc: Unable to document bound instance methods exported as module
  functions
* #6289: autodoc: :confval:`autodoc_default_options` now supports
  ``imported-members`` option
* #4777: autodoc: Support coroutines
* #6212: autosummary: Add :confval:`autosummary_imported_members` to display
  imported members on autosummary
* #6271: ``make clean`` is catastrophically broken if building into '.'
* #4777: py domain: Add ``:async:`` option to :rst:dir:`py:function` directive
* py domain: Add new options to :rst:dir:`py:method` directive

  - ``:async:``
  - ``:classmethod:``
  - ``:property:``
  - ``:staticmethod:``

* rst domain: Add :rst:dir:`directive:option` directive to describe the options
  of a directive
* #6306: html: Add a label to the search form for accessibility purposes

Bugs fixed
----------

* #6230: An inappropriate node_id has been generated by the glossary directive
  if the term consists of non-ASCII characters
* #6213: ifconfig: contents after headings are not shown
* commented term in glossary directive is wrongly recognized
* #6299: rst domain: rst:directive directive generates wasted space
* #6331: man: invalid output when doctest follows rubric
* #6351: "Hyperlink target is not referenced" message is shown even if
  referenced
* #6165: autodoc: ``tab_width`` setting of docutils has been ignored
* Generated Makefiles lack a final EOL (refs: #6232)

Testing
--------

Release 2.0.1 (in development)
==============================

Dependencies
------------

Incompatible changes
--------------------

Deprecated
----------

Features added
--------------

Release 2.0.1 (released Apr 08, 2019)
=====================================

Bugs fixed
----------

* LaTeX: some system labels are not translated
* RemovedInSphinx30Warning is marked as pending
* deprecation warnings are not emitted

Testing
--------

- sphinx.application.CONFIG_FILENAME
- sphinx.builders.htmlhelp
- :confval:`viewcode_import`

* #6208: C++, properly parse full xrefs that happen to have a short xref as prefix.
* #6220, #6225: napoleon: AttributeError is raised for raised section having
  references
* #6245: circular import error on importing SerializingHTMLBuilder
* #6243: LaTeX: 'releasename' setting for latex_elements is ignored
* #6244: html: Search function is broken with 3rd party themes
* #6263: html: HTML5Translator crashed with invalid field node
* #6262: html theme: The style of field lists has changed in bizstyle theme

Release 2.0.0 (released Mar 29, 2019)
=====================================
doc/_templates/index.html (vendored, 2 lines changed)
@ -97,6 +97,8 @@
|
||||
<p>{%trans%}A Japanese book about Sphinx has been published by O'Reilly:
|
||||
<a href="https://www.oreilly.co.jp/books/9784873116488/">Sphinxをはじめよう /
|
||||
Learning Sphinx</a>.{%endtrans%}</p>
|
||||
<p>{%trans%}In 2019 the second edition of a German book about Sphinx was published:
|
||||
<a href="https://literatur.hasecke.com/post/software-dokumentation-mit-sphinx/">Software-Dokumentation mit Sphinx</a>.{%endtrans%}</p>
|
||||
<!-- <p><img src="{{ pathto("_static/bookcover.png", 1) }}"/></p> -->
|
||||
|
||||
|
||||
|
@ -31,7 +31,8 @@ This is the current list of contributed extensions in that repository:
|
||||
- actdiag: embed activity diagrams by using actdiag_
|
||||
- adadomain: an extension for Ada support (Sphinx 1.0 needed)
|
||||
- ansi: parse ANSI color sequences inside documents
|
||||
- argdoc: automatically generate documentation for command-line arguments, descriptions, and help text
|
||||
- argdoc: automatically generate documentation for command-line arguments,
|
||||
descriptions and help text
|
||||
- astah: embed diagram by using astah
|
||||
- autoanysrc: Gather reST documentation from any source files
|
||||
- autorun: Execute code in a ``runblock`` directive
|
||||
@ -64,7 +65,8 @@ This is the current list of contributed extensions in that repository:
|
||||
- imgur: embed Imgur images, albums, and metadata in documents
|
||||
- inlinesyntaxhighlight_: inline syntax highlighting
|
||||
- lassodomain: a domain for documenting Lasso_ source code
|
||||
- libreoffice: an extension to include any drawing supported by LibreOffice (e.g. odg, vsd, ...)
|
||||
- libreoffice: an extension to include any drawing supported by LibreOffice
|
||||
(e.g. odg, vsd, ...)
|
||||
- lilypond: an extension inserting music scripts from Lilypond_ in PNG format
|
||||
- makedomain_: a domain for `GNU Make`_
|
||||
- matlabdomain: document MATLAB_ code
|
||||
@ -100,8 +102,8 @@ This is the current list of contributed extensions in that repository:
|
||||
- zopeext: provide an ``autointerface`` directive for using `Zope interfaces`_
|
||||
|
||||
|
||||
See the :doc:`extension tutorials <../development/tutorials/index>` on getting started with writing your
|
||||
own extensions.
|
||||
See the :doc:`extension tutorials <../development/tutorials/index>` on getting
|
||||
started with writing your own extensions.
|
||||
|
||||
|
||||
.. _aafigure: https://launchpad.net/aafigure
|
||||
|
@ -145,7 +145,7 @@ Sphinx core events
|
||||
------------------
|
||||
|
||||
These events are known to the core. The arguments shown are given to the
|
||||
registered event handlers. Use :meth:`.connect` in an extension's ``setup``
|
||||
registered event handlers. Use :meth:`.Sphinx.connect` in an extension's ``setup``
|
||||
function (note that ``conf.py`` can also have a ``setup`` function) to connect
|
||||
handlers to the events. Example:
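
A minimal sketch of such a ``setup()`` function (the handler name and the
substitution it performs are purely illustrative)::

   def setup(app):
       app.connect('source-read', on_source_read)

   def on_source_read(app, docname, source):
       # 'source' is a single-item list; modify it in place to change the
       # document text before it is parsed
       source[0] = source[0].replace('TODO', 'to do')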
|
||||
|
||||
|
@ -38,3 +38,8 @@ Builder API
|
||||
.. automethod:: write_doc
|
||||
.. automethod:: finish
|
||||
|
||||
**Attributes**
|
||||
|
||||
.. attribute:: events
|
||||
|
||||
An :class:`.EventManager` object.
|
||||
|
@ -31,6 +31,11 @@ The following is a list of deprecated interfaces.
|
||||
- 4.0
|
||||
- N/A
|
||||
|
||||
* - ``sphinx.builders._epub_base.EpubBuilder.esc()``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``html.escape()``
|
||||
|
||||
* - ``sphinx.directives.Acks``
|
||||
- 2.1
|
||||
- 4.0
|
||||
@ -116,10 +121,55 @@ The following is a list of deprecated interfaces.
|
||||
- 4.0
|
||||
- ``sphinx.directives.other.VersionChange``
|
||||
|
||||
* - ``sphinx.domains.python.PyClassmember``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.domains.python.PyAttribute``,
|
||||
``sphinx.domains.python.PyMethod``,
|
||||
``sphinx.domains.python.PyClassMethod``,
|
||||
``sphinx.domains.python.PyObject`` and
|
||||
``sphinx.domains.python.PyStaticMethod``
|
||||
|
||||
* - ``sphinx.domains.python.PyModulelevel``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.domains.python.PyFunction``,
|
||||
``sphinx.domains.python.PyObject`` and
|
||||
``sphinx.domains.python.PyVariable``
|
||||
|
||||
* - ``sphinx.domains.std.StandardDomain._resolve_citation_xref()``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.domains.citation.CitationDomain.resolve_xref()``
|
||||
|
||||
* - ``sphinx.domains.std.StandardDomain.note_citations()``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.domains.citation.CitationDomain.note_citation()``
|
||||
|
||||
* - ``sphinx.domains.std.StandardDomain.note_citation_refs()``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.domains.citation.CitationDomain.note_citation_reference()``
|
||||
|
||||
* - ``sphinx.domains.std.StandardDomain.note_labels()``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.domains.std.StandardDomain.process_doc()``
|
||||
|
||||
* - ``sphinx.environment.NoUri``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.errors.NoUri``
|
||||
* - ``sphinx.ext.apidoc.format_directive()``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- N/A
|
||||
|
||||
* - ``sphinx.ext.apidoc.format_heading()``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- N/A
|
||||
|
||||
* - ``sphinx.ext.autodoc.importer.MockFinder``
|
||||
- 2.1
|
||||
@ -141,6 +191,31 @@ The following is a list of deprecated interfaces.
|
||||
- 4.0
|
||||
- ``sphinx.ext.autosummary.AutoLink``
|
||||
|
||||
* - ``sphinx.ext.imgmath.DOC_BODY``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- N/A
|
||||
|
||||
* - ``sphinx.ext.imgmath.DOC_BODY_PREVIEW``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- N/A
|
||||
|
||||
* - ``sphinx.ext.imgmath.DOC_HEAD``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- N/A
|
||||
|
||||
* - ``sphinx.transforms.CitationReferences``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.domains.citation.CitationReferenceTransform``
|
||||
|
||||
* - ``sphinx.transforms.SmartQuotesSkipper``
|
||||
- 2.1
|
||||
- 4.0
|
||||
- ``sphinx.domains.citation.CitationDefinitionTransform``
|
||||
|
||||
* - ``sphinx.util.docfields.DocFieldTransformer.preprocess_fieldtypes()``
|
||||
- 2.1
|
||||
- 4.0
|
||||
|
@ -27,6 +27,10 @@ Build environment API
|
||||
|
||||
Directory for storing pickled doctrees.
|
||||
|
||||
.. attribute:: events
|
||||
|
||||
An :class:`.EventManager` object.
|
||||
|
||||
.. attribute:: found_docs
|
||||
|
||||
A set of all existing docnames.
|
||||
|
@ -97,7 +97,8 @@ extension. These are:
|
||||
|
||||
The config is available as ``app.config`` or ``env.config``.
|
||||
|
||||
To see an example of use of these objects, refer to :doc:`../development/tutorials/index`.
|
||||
To see an example of use of these objects, refer to
|
||||
:doc:`../development/tutorials/index`.
|
||||
|
||||
.. _build-phases:
|
||||
|
||||
|
@ -147,5 +147,6 @@ return ``node.children`` from the Directive.
|
||||
|
||||
.. seealso::
|
||||
|
||||
`Creating directives <http://docutils.sourceforge.net/docs/howto/rst-directives.html>`_
|
||||
HOWTO of the Docutils documentation
|
||||
`Creating directives`_ HOWTO of the Docutils documentation
|
||||
|
||||
.. _Creating directives: http://docutils.sourceforge.net/docs/howto/rst-directives.html
|
||||
|
@ -29,3 +29,9 @@ components (e.g. :class:`.Config`, :class:`.BuildEnvironment` and so on) easily.
|
||||
|
||||
.. autoclass:: sphinx.transforms.post_transforms.images.ImageConverter
|
||||
:members:
|
||||
|
||||
Utility components
|
||||
------------------
|
||||
|
||||
.. autoclass:: sphinx.events.EventManager
|
||||
:members:
|
||||
|
@ -51,11 +51,11 @@ Using Sphinx with...
|
||||
--------------------
|
||||
|
||||
Read the Docs
|
||||
https://readthedocs.org is a documentation hosting service based around
|
||||
`Read the Docs <https://readthedocs.org>`_ is a documentation hosting service based around
|
||||
Sphinx. They will host sphinx documentation, along with supporting a number
|
||||
of other features including version support, PDF generation, and more. The
|
||||
`Getting Started
|
||||
<https://read-the-docs.readthedocs.io/en/latest/getting_started.html>`_
|
||||
<https://docs.readthedocs.io/en/stable/intro/getting-started-with-sphinx.html>`_
|
||||
guide is a good place to start.
|
||||
|
||||
Epydoc
|
||||
|
@ -354,8 +354,8 @@ are in HTML form), these variables are also available:
|
||||
|
||||
.. data:: body
|
||||
|
||||
A string containing the content of the page in HTML form as produced by the HTML builder,
|
||||
before the theme is applied.
|
||||
A string containing the content of the page in HTML form as produced by the
|
||||
HTML builder, before the theme is applied.
|
||||
|
||||
.. data:: display_toc
|
||||
|
||||
@ -382,8 +382,9 @@ are in HTML form), these variables are also available:
|
||||
|
||||
.. data:: page_source_suffix
|
||||
|
||||
The suffix of the file that was rendered. Since we support a list of :confval:`source_suffix`,
|
||||
this will allow you to properly link to the original source file.
|
||||
The suffix of the file that was rendered. Since we support a list of
|
||||
:confval:`source_suffix`, this will allow you to properly link to the
|
||||
original source file.
|
||||
|
||||
.. data:: parents
|
||||
|
||||
|
@ -387,14 +387,17 @@ There are also config values that you can set:
|
||||
|
||||
The supported options are ``'members'``, ``'member-order'``,
|
||||
``'undoc-members'``, ``'private-members'``, ``'special-members'``,
|
||||
``'inherited-members'``, ``'show-inheritance'``, ``'ignore-module-all'`` and
|
||||
``'exclude-members'``.
|
||||
``'inherited-members'``, ``'show-inheritance'``, ``'ignore-module-all'``,
|
||||
``'imported-members'`` and ``'exclude-members'``.
|
||||
|
||||
.. versionadded:: 1.8
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Accepts ``True`` as a value.
|
||||
|
||||
.. versionchanged:: 2.1
|
||||
Added ``'imported-members'``.
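
For example, a ``conf.py`` might enable it like this (the option values shown
are illustrative)::

   # conf.py
   extensions = ['sphinx.ext.autodoc']

   autodoc_default_options = {
       'members': True,            # accepted as a value since Sphinx 2.0
       'imported-members': True,   # new in Sphinx 2.1
   }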
|
||||
|
||||
.. confval:: autodoc_docstring_signature
|
||||
|
||||
Functions imported from C modules cannot be introspected, and therefore the
|
||||
|
@ -165,6 +165,16 @@ also use these config values:
|
||||
:confval:`autodoc_mock_imports` for more details. It defaults to
|
||||
:confval:`autodoc_mock_imports`.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. confval:: autosummary_imported_members
|
||||
|
||||
A boolean flag indicating whether to document classes and functions imported
|
||||
in modules. The default is ``False``.
|
||||
|
||||
.. versionadded:: 2.1
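
A minimal ``conf.py`` sketch enabling this (the values are illustrative)::

   extensions = ['sphinx.ext.autosummary']
   autosummary_generate = True
   autosummary_imported_members = True   # also document names imported into modules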
|
||||
|
||||
|
||||
Customizing templates
|
||||
---------------------
|
||||
|
||||
|
@ -8,6 +8,11 @@
|
||||
|
||||
This extension is quite simple, and features only one directive:
|
||||
|
||||
.. warning::
|
||||
|
||||
This directive is designed to control only the content of a document.  It
cannot control sections, labels and so on.
|
||||
|
||||
.. rst:directive:: ifconfig
|
||||
|
||||
Include content of the directive only if the Python expression given as an
|
||||
|
@ -15,7 +15,8 @@ Math support for HTML outputs in Sphinx
|
||||
So mathbase extension is no longer needed.
|
||||
|
||||
Since mathematical notation isn't natively supported by HTML in any way, Sphinx
|
||||
gives a math support to HTML document with several extensions.
|
||||
provides math support for HTML documents through several extensions. These use
the reStructuredText math :rst:dir:`directive <math>` and :rst:role:`role <math>`.
|
||||
|
||||
:mod:`sphinx.ext.imgmath` -- Render math as images
|
||||
--------------------------------------------------
|
||||
@ -29,13 +30,39 @@ This extension renders math via LaTeX and dvipng_ or dvisvgm_ into PNG or SVG
|
||||
images. This of course means that the computer where the docs are built must
|
||||
have both programs available.
|
||||
|
||||
There are various config values you can set to influence how the images are
|
||||
built:
|
||||
There are various configuration values you can set to influence how the images
|
||||
are built:
|
||||
|
||||
.. confval:: imgmath_image_format
|
||||
|
||||
The output image format. The default is ``'png'``. It should be either
|
||||
``'png'`` or ``'svg'``.
|
||||
The output image format. The default is ``'png'``. It should be either
|
||||
``'png'`` or ``'svg'``. The image is produced by first executing ``latex``
|
||||
on the TeX mathematical mark-up then (depending on the requested format)
|
||||
either `dvipng`_ or `dvisvgm`_.
|
||||
|
||||
.. confval:: imgmath_use_preview
|
||||
|
||||
``dvipng`` and ``dvisvgm`` both have the ability to collect from LaTeX the
|
||||
"depth" of the rendered math: an inline image should use this "depth" in a
|
||||
``vertical-align`` style to get correctly aligned with surrounding text.
|
||||
|
||||
This mechanism requires the `LaTeX preview package`_ (available as
|
||||
``preview-latex-style`` on Ubuntu xenial). Therefore, the default for this
|
||||
option is ``False`` but it is strongly recommended to set it to ``True``.
|
||||
|
||||
.. versionchanged:: 2.1
|
||||
|
||||
This option can be used with the ``'svg'`` :confval:`imgmath_image_format`.
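
Taken together, an SVG-based setup might look like this in ``conf.py`` (the
values are illustrative)::

   extensions = ['sphinx.ext.imgmath']

   imgmath_image_format = 'svg'   # rendered via latex + dvisvgm
   imgmath_use_preview = True     # requires the LaTeX "preview" package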
|
||||
|
||||
.. confval:: imgmath_add_tooltips
|
||||
|
||||
Default: ``True``. If false, do not add the LaTeX code as an "alt" attribute
|
||||
for math images.
|
||||
|
||||
.. confval:: imgmath_font_size
|
||||
|
||||
The font size (in ``pt``) of the displayed math. The default value is
|
||||
``12``. It must be a positive integer.
|
||||
|
||||
.. confval:: imgmath_latex
|
||||
|
||||
@ -53,20 +80,6 @@ built:
|
||||
This value should only contain the path to the latex executable, not further
|
||||
arguments; use :confval:`imgmath_latex_args` for that purpose.
|
||||
|
||||
.. confval:: imgmath_dvipng
|
||||
|
||||
The command name with which to invoke ``dvipng``. The default is
|
||||
``'dvipng'``; you may need to set this to a full path if ``dvipng`` is not in
|
||||
the executable search path. This option is only used when
|
||||
``imgmath_image_format`` is set to ``'png'``.
|
||||
|
||||
.. confval:: imgmath_dvisvgm
|
||||
|
||||
The command name with which to invoke ``dvisvgm``. The default is
|
||||
``'dvisvgm'``; you may need to set this to a full path if ``dvisvgm`` is not
|
||||
in the executable search path. This option is only used when
|
||||
``imgmath_image_format`` is ``'svg'``.
|
||||
|
||||
.. confval:: imgmath_latex_args
|
||||
|
||||
Additional arguments to give to latex, as a list. The default is an empty
|
||||
@ -74,48 +87,43 @@ built:
|
||||
|
||||
.. confval:: imgmath_latex_preamble
|
||||
|
||||
Additional LaTeX code to put into the preamble of the short LaTeX files that
|
||||
are used to translate the math snippets. This is empty by default. Use it
|
||||
e.g. to add more packages whose commands you want to use in the math.
|
||||
Additional LaTeX code to put into the preamble of the LaTeX files used to
|
||||
translate the math snippets. This is left empty by default. Use it
|
||||
e.g. to add packages which modify the fonts used for math, such as
|
||||
``'\\usepackage{newtxsf}'`` for sans-serif fonts, or
|
||||
``'\\usepackage{fouriernc}'`` for serif fonts. Indeed, the default LaTeX
|
||||
math fonts have rather thin glyphs which (in HTML output) often do not
|
||||
match well with the font for text.
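
For instance, a ``conf.py`` could switch to heavier sans-serif math glyphs with
the package named above (a minimal sketch)::

   imgmath_latex_preamble = r'\usepackage{newtxsf}'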
|
||||
|
||||
.. confval:: imgmath_dvipng
|
||||
|
||||
The command name to invoke ``dvipng``. The default is
|
||||
``'dvipng'``; you may need to set this to a full path if ``dvipng`` is not in
|
||||
the executable search path. This option is only used when
|
||||
``imgmath_image_format`` is set to ``'png'``.
|
||||
|
||||
.. confval:: imgmath_dvipng_args
|
||||
|
||||
Additional arguments to give to dvipng, as a list. The default value is
|
||||
``['-gamma', '1.5', '-D', '110', '-bg', 'Transparent']`` which makes the
|
||||
image a bit darker and larger then it is by default, and produces PNGs with a
|
||||
image a bit darker and larger than it is by default (this compensates
|
||||
somewhat for the thinness of default LaTeX math fonts), and produces PNGs with a
|
||||
transparent background. This option is used only when
|
||||
``imgmath_image_format`` is ``'png'``.
|
||||
|
||||
.. confval:: imgmath_dvisvgm
|
||||
|
||||
The command name to invoke ``dvisvgm``. The default is
|
||||
``'dvisvgm'``; you may need to set this to a full path if ``dvisvgm`` is not
|
||||
in the executable search path. This option is only used when
|
||||
``imgmath_image_format`` is ``'svg'``.
|
||||
|
||||
.. confval:: imgmath_dvisvgm_args
|
||||
|
||||
Additional arguments to give to dvisvgm, as a list. The default value is
|
||||
``['--no-fonts']``. This option is used only when ``imgmath_image_format``
|
||||
is ``'svg'``.
|
||||
|
||||
.. confval:: imgmath_use_preview
|
||||
|
||||
``dvipng`` has the ability to determine the "depth" of the rendered text: for
|
||||
example, when typesetting a fraction inline, the baseline of surrounding text
|
||||
should not be flush with the bottom of the image, rather the image should
|
||||
extend a bit below the baseline. This is what TeX calls "depth". When this
|
||||
is enabled, the images put into the HTML document will get a
|
||||
``vertical-align`` style that correctly aligns the baselines.
|
||||
|
||||
Unfortunately, this only works when the `preview-latex package`_ is
|
||||
installed. Therefore, the default for this option is ``False``.
|
||||
|
||||
Currently this option is only used when ``imgmath_image_format`` is
|
||||
``'png'``.
|
||||
|
||||
.. confval:: imgmath_add_tooltips
|
||||
|
||||
Default: ``True``. If false, do not add the LaTeX code as an "alt" attribute
|
||||
for math images.
|
||||
|
||||
.. confval:: imgmath_font_size
|
||||
|
||||
The font size (in ``pt``) of the displayed math. The default value is
|
||||
``12``. It must be a positive integer.
|
||||
Additional arguments to give to dvisvgm, as a list. The default value is
|
||||
``['--no-fonts']``, which means that ``dvisvgm`` will render glyphs as path
|
||||
elements (cf. the `dvisvgm FAQ`_). This option is used only when
|
||||
``imgmath_image_format`` is ``'svg'``.
|
||||
|
||||
|
||||
:mod:`sphinx.ext.mathjax` -- Render math via JavaScript
|
||||
@ -131,7 +139,13 @@ MathJax_ is then loaded and transforms the LaTeX markup to readable math live in
|
||||
the browser.
|
||||
|
||||
Because MathJax (and the necessary fonts) is very large, it is not included in
|
||||
Sphinx.
|
||||
Sphinx but is set to automatically include it from a third-party site.
|
||||
|
||||
.. attention::
|
||||
|
||||
You should use the math :rst:dir:`directive <math>` and
|
||||
:rst:role:`role <math>`, not the native MathJax ``$$``, ``\(``, etc.
|
||||
|
||||
|
||||
.. confval:: mathjax_path
|
||||
|
||||
@ -140,8 +154,9 @@ Sphinx.
|
||||
|
||||
The default is the ``https://`` URL that loads the JS files from the
|
||||
`cdnjs`__ Content Delivery Network. See the `MathJax Getting Started
|
||||
page`__ for details. If you want MathJax to be available offline, you have
|
||||
to download it and set this value to a different path.
|
||||
page`__ for details. If you want MathJax to be available offline or
|
||||
without including resources from a third-party site, you have to
|
||||
download it and set this value to a different path.
|
||||
|
||||
__ https://cdnjs.com
|
||||
|
||||
@ -209,7 +224,8 @@ package jsMath_. It provides this config value:
|
||||
|
||||
|
||||
.. _dvipng: https://savannah.nongnu.org/projects/dvipng/
|
||||
.. _dvisvgm: http://dvisvgm.bplaced.net/
|
||||
.. _dvisvgm: https://dvisvgm.de/
|
||||
.. _dvisvgm FAQ: https://dvisvgm.de/FAQ
|
||||
.. _MathJax: https://www.mathjax.org/
|
||||
.. _jsMath: http://www.math.union.edu/~dpvc/jsmath/
|
||||
.. _preview-latex package: https://www.gnu.org/software/auctex/preview-latex.html
|
||||
.. _LaTeX preview package: https://www.gnu.org/software/auctex/preview-latex.html
|
||||
|
@ -169,6 +169,13 @@ The following directives are provided for module and class contents:
|
||||
This information can (in any ``py`` directive) optionally be given in a
|
||||
structured form, see :ref:`info-field-lists`.
|
||||
|
||||
The ``async`` option can be given (with no value) to indicate the function is
|
||||
an asynchronous function.
|
||||
|
||||
.. versionchanged:: 2.1
|
||||
|
||||
``:async:`` option added.
|
||||
|
||||
.. rst:directive:: .. py:data:: name
|
||||
|
||||
Describes global data in a module, including both variables and values used
|
||||
@ -216,6 +223,20 @@ The following directives are provided for module and class contents:
|
||||
described for ``function``. See also :ref:`signatures` and
|
||||
:ref:`info-field-lists`.
|
||||
|
||||
The ``async`` option can be given (with no value) to indicate the method is
|
||||
an async method.
|
||||
|
||||
The ``classmethod`` option and ``staticmethod`` option can be given (with
|
||||
no value) to indicate the method is a class method (or a static method).
|
||||
|
||||
The ``property`` option can be given (with no value) to indicate the method
|
||||
is a property.
|
||||
|
||||
.. versionchanged:: 2.1
|
||||
|
||||
``:async:``, ``:classmethod:``, ``:property:`` and ``:staticmethod:``
|
||||
options added.
|
||||
|
||||
.. rst:directive:: .. py:staticmethod:: name(parameters)
|
||||
|
||||
Like :rst:dir:`py:method`, but indicates that the method is a static method.
|
||||
@ -1062,15 +1083,16 @@ These roles link to the given declaration types:
|
||||
|
||||
.. admonition:: Note on References with Templates Parameters/Arguments
|
||||
|
||||
These roles follow the Sphinx :ref:`xref-syntax` rules. This means care must be
|
||||
taken when referencing a (partial) template specialization, e.g. if the link looks like
|
||||
this: ``:cpp:class:`MyClass<int>```.
|
||||
These roles follow the Sphinx :ref:`xref-syntax` rules. This means care must
|
||||
be taken when referencing a (partial) template specialization, e.g. if the
|
||||
link looks like this: ``:cpp:class:`MyClass<int>```.
|
||||
This is interpreted as a link to ``int`` with a title of ``MyClass``.
|
||||
In this case, escape the opening angle bracket with a backslash,
|
||||
like this: ``:cpp:class:`MyClass\<int>```.
|
||||
|
||||
When a custom title is not needed it may be useful to use the roles for inline expressions,
|
||||
:rst:role:`cpp:expr` and :rst:role:`cpp:texpr`, where angle brackets do not need escaping.
|
||||
When a custom title is not needed it may be useful to use the roles for
|
||||
inline expressions, :rst:role:`cpp:expr` and :rst:role:`cpp:texpr`, where
|
||||
angle brackets do not need escaping.
|
||||
|
||||
Declarations without template parameters and template arguments
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@ -1402,6 +1424,43 @@ The reStructuredText domain (name **rst**) provides the following directives:
|
||||
|
||||
Bar description.
|
||||
|
||||
.. rst:directive:: .. rst:directive:option:: name
|
||||
|
||||
Describes an option for a reST directive. The *name* can be a single option
name, or an option name with arguments separated by a colon (``:``).
|
||||
For example::
|
||||
|
||||
.. rst:directive:: toctree
|
||||
|
||||
.. rst:directive:option:: caption: caption of ToC
|
||||
|
||||
.. rst:directive:option:: glob
|
||||
|
||||
will be rendered as:
|
||||
|
||||
.. rst:directive:: toctree
|
||||
:noindex:
|
||||
|
||||
.. rst:directive:option:: caption: caption of ToC
|
||||
|
||||
.. rst:directive:option:: glob
|
||||
|
||||
.. rubric:: options
|
||||
|
||||
.. rst:directive:option:: type
|
||||
:type: description for the option of directive
|
||||
|
||||
Describe the type of option value.
|
||||
|
||||
For example::
|
||||
|
||||
.. rst:directive:: toctree
|
||||
|
||||
.. rst:directive:option:: maxdepth
|
||||
:type: integer or no value
|
||||
|
||||
.. versionadded:: 2.1
|
||||
|
||||
.. rst:directive:: .. rst:role:: name
|
||||
|
||||
Describes a reST role. For example::
|
||||
|
@ -55,6 +55,11 @@ strict_optional = False
|
||||
filterwarnings =
|
||||
all
|
||||
ignore::DeprecationWarning:docutils.io
|
||||
markers =
|
||||
sphinx
|
||||
apidoc
|
||||
setup_command
|
||||
test_params
|
||||
|
||||
[coverage:run]
|
||||
branch = True
|
||||
|
setup.py (3 lines changed)
@ -38,6 +38,9 @@ extras_require = {
|
||||
':sys_platform=="win32"': [
|
||||
'colorama>=0.3.5',
|
||||
],
|
||||
'docs': [
|
||||
'sphinxcontrib-websupport',
|
||||
],
|
||||
'test': [
|
||||
'pytest',
|
||||
'pytest-cov',
|
||||
|
@ -23,7 +23,6 @@ from docutils.parsers.rst import Directive, roles
|
||||
import sphinx
|
||||
from sphinx import package_dir, locale
|
||||
from sphinx.config import Config
|
||||
from sphinx.config import CONFIG_FILENAME # NOQA # for compatibility (RemovedInSphinx30)
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||
from sphinx.environment import BuildEnvironment
|
||||
from sphinx.errors import ApplicationError, ConfigError, VersionRequirementError
|
||||
@ -73,6 +72,7 @@ builtin_extensions = (
|
||||
'sphinx.config',
|
||||
'sphinx.domains.c',
|
||||
'sphinx.domains.changeset',
|
||||
'sphinx.domains.citation',
|
||||
'sphinx.domains.cpp',
|
||||
'sphinx.domains.javascript',
|
||||
'sphinx.domains.math',
|
||||
@ -182,7 +182,7 @@ class Sphinx:
|
||||
self.warningiserror = warningiserror
|
||||
logging.setup(self, self._status, self._warning)
|
||||
|
||||
self.events = EventManager()
|
||||
self.events = EventManager(self)
|
||||
|
||||
# keep last few messages for traceback
|
||||
# This will be filled by sphinx.util.logging.LastMessagesWriter
|
||||
@ -249,7 +249,7 @@ class Sphinx:
|
||||
|
||||
# now that we know all config values, collect them from conf.py
|
||||
self.config.init_values()
|
||||
self.emit('config-inited', self.config)
|
||||
self.events.emit('config-inited', self.config)
|
||||
|
||||
# create the project
|
||||
self.project = Project(self.srcdir, self.config.source_suffix)
|
||||
@ -319,7 +319,7 @@ class Sphinx:
|
||||
# type: () -> None
|
||||
self.builder.set_environment(self.env)
|
||||
self.builder.init()
|
||||
self.emit('builder-inited')
|
||||
self.events.emit('builder-inited')
|
||||
|
||||
# ---- main "build" method -------------------------------------------------
|
||||
|
||||
@ -360,10 +360,10 @@ class Sphinx:
|
||||
envfile = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
|
||||
if path.isfile(envfile):
|
||||
os.unlink(envfile)
|
||||
self.emit('build-finished', err)
|
||||
self.events.emit('build-finished', err)
|
||||
raise
|
||||
else:
|
||||
self.emit('build-finished', None)
|
||||
self.events.emit('build-finished', None)
|
||||
self.builder.cleanup()
|
||||
|
||||
# ---- general extensibility interface -------------------------------------
|
||||
@ -420,13 +420,7 @@ class Sphinx:
|
||||
Return the return values of all callbacks as a list. Do not emit core
|
||||
Sphinx events in extensions!
|
||||
"""
|
||||
try:
|
||||
logger.debug('[app] emitting event: %r%s', event, repr(args)[:100])
|
||||
except Exception:
|
||||
# not every object likes to be repr()'d (think
|
||||
# random stuff coming via autodoc)
|
||||
pass
|
||||
return self.events.emit(event, self, *args)
|
||||
return self.events.emit(event, *args)
|
||||
|
||||
def emit_firstresult(self, event, *args):
|
||||
# type: (str, Any) -> Any
|
||||
@ -436,7 +430,7 @@ class Sphinx:
|
||||
|
||||
.. versionadded:: 0.5
|
||||
"""
|
||||
return self.events.emit_firstresult(event, self, *args)
|
||||
return self.events.emit_firstresult(event, *args)
|
||||
|
||||
# registering addon parts
|
||||
|
||||
|
@ -43,6 +43,7 @@ if False:
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
from sphinx.config import Config # NOQA
|
||||
from sphinx.environment import BuildEnvironment # NOQA
|
||||
from sphinx.events import EventManager # NOQA
|
||||
from sphinx.util.i18n import CatalogInfo # NOQA
|
||||
from sphinx.util.tags import Tags # NOQA
|
||||
|
||||
@ -93,6 +94,7 @@ class Builder:
|
||||
|
||||
self.app = app # type: Sphinx
|
||||
self.env = None # type: BuildEnvironment
|
||||
self.events = app.events # type: EventManager
|
||||
self.config = app.config # type: Config
|
||||
self.tags = app.tags # type: Tags
|
||||
self.tags.add(self.format)
|
||||
@ -399,7 +401,7 @@ class Builder:
|
||||
added, changed, removed = self.env.get_outdated_files(updated)
|
||||
|
||||
# allow user intervention as well
|
||||
for docs in self.app.emit('env-get-outdated', self, added, changed, removed):
|
||||
for docs in self.events.emit('env-get-outdated', self, added, changed, removed):
|
||||
changed.update(set(docs) & self.env.found_docs)
|
||||
|
||||
# if files were added or removed, all documents with globbed toctrees
|
||||
@ -416,13 +418,13 @@ class Builder:
|
||||
|
||||
# clear all files no longer present
|
||||
for docname in removed:
|
||||
self.app.emit('env-purge-doc', self.env, docname)
|
||||
self.events.emit('env-purge-doc', self.env, docname)
|
||||
self.env.clear_doc(docname)
|
||||
|
||||
# read all new and changed files
|
||||
docnames = sorted(added | changed)
|
||||
# allow changing and reordering the list of docs to read
|
||||
self.app.emit('env-before-read-docs', self.env, docnames)
|
||||
self.events.emit('env-before-read-docs', self.env, docnames)
|
||||
|
||||
# check if we should do parallel or serial read
|
||||
if parallel_available and len(docnames) > 5 and self.app.parallel > 1:
|
||||
@ -439,7 +441,7 @@ class Builder:
|
||||
raise SphinxError('master file %s not found' %
|
||||
self.env.doc2path(self.config.master_doc))
|
||||
|
||||
for retval in self.app.emit('env-updated', self.env):
|
||||
for retval in self.events.emit('env-updated', self.env):
|
||||
if retval is not None:
|
||||
docnames.extend(retval)
|
||||
|
||||
@ -453,7 +455,7 @@ class Builder:
|
||||
for docname in status_iterator(docnames, __('reading sources... '), "purple",
|
||||
len(docnames), self.app.verbosity):
|
||||
# remove all inventory entries for that file
|
||||
self.app.emit('env-purge-doc', self.env, docname)
|
||||
self.events.emit('env-purge-doc', self.env, docname)
|
||||
self.env.clear_doc(docname)
|
||||
self.read_doc(docname)
|
||||
|
||||
@ -461,7 +463,7 @@ class Builder:
|
||||
# type: (List[str], int) -> None
|
||||
# clear all outdated docs at once
|
||||
for docname in docnames:
|
||||
self.app.emit('env-purge-doc', self.env, docname)
|
||||
self.events.emit('env-purge-doc', self.env, docname)
|
||||
self.env.clear_doc(docname)
|
||||
|
||||
def read_process(docs):
|
||||
|
@ -8,6 +8,7 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import html
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
@ -178,7 +179,9 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
def esc(self, name):
|
||||
# type: (str) -> str
|
||||
"""Replace all characters not allowed in text an attribute values."""
|
||||
# Like cgi.escape, but also replace apostrophe
|
||||
warnings.warn(
|
||||
'%s.esc() is deprecated. Use html.escape() instead.' % self.__class__.__name__,
|
||||
RemovedInSphinx40Warning)
|
||||
name = name.replace('&', '&')
|
||||
name = name.replace('<', '<')
|
||||
name = name.replace('>', '>')
|
||||
@ -201,8 +204,8 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
if (self.toctree_template % level) in classes:
|
||||
result.append({
|
||||
'level': level,
|
||||
'refuri': self.esc(refuri),
|
||||
'text': ssp(self.esc(doctree.astext()))
|
||||
'refuri': html.escape(refuri),
|
||||
'text': ssp(html.escape(doctree.astext()))
|
||||
})
|
||||
break
|
||||
elif isinstance(doctree, nodes.Element):
|
||||
@ -241,21 +244,21 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
"""
|
||||
refnodes.insert(0, {
|
||||
'level': 1,
|
||||
'refuri': self.esc(self.config.master_doc + self.out_suffix),
|
||||
'text': ssp(self.esc(
|
||||
'refuri': html.escape(self.config.master_doc + self.out_suffix),
|
||||
'text': ssp(html.escape(
|
||||
self.env.titles[self.config.master_doc].astext()))
|
||||
})
|
||||
for file, text in reversed(self.config.epub_pre_files):
|
||||
refnodes.insert(0, {
|
||||
'level': 1,
|
||||
'refuri': self.esc(file),
|
||||
'text': ssp(self.esc(text))
|
||||
'refuri': html.escape(file),
|
||||
'text': ssp(html.escape(text))
|
||||
})
|
||||
for file, text in self.config.epub_post_files:
|
||||
refnodes.append({
|
||||
'level': 1,
|
||||
'refuri': self.esc(file),
|
||||
'text': ssp(self.esc(text))
|
||||
'refuri': html.escape(file),
|
||||
'text': ssp(html.escape(text))
|
||||
})
|
||||
|
||||
def fix_fragment(self, prefix, fragment):
|
||||
@ -511,15 +514,15 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
file properly escaped.
|
||||
"""
|
||||
metadata = {} # type: Dict[str, Any]
|
||||
metadata['title'] = self.esc(self.config.epub_title)
|
||||
metadata['author'] = self.esc(self.config.epub_author)
|
||||
metadata['uid'] = self.esc(self.config.epub_uid)
|
||||
metadata['lang'] = self.esc(self.config.epub_language)
|
||||
metadata['publisher'] = self.esc(self.config.epub_publisher)
|
||||
metadata['copyright'] = self.esc(self.config.epub_copyright)
|
||||
metadata['scheme'] = self.esc(self.config.epub_scheme)
|
||||
metadata['id'] = self.esc(self.config.epub_identifier)
|
||||
metadata['date'] = self.esc(format_date("%Y-%m-%d"))
|
||||
metadata['title'] = html.escape(self.config.epub_title)
|
||||
metadata['author'] = html.escape(self.config.epub_author)
|
||||
metadata['uid'] = html.escape(self.config.epub_uid)
|
||||
metadata['lang'] = html.escape(self.config.epub_language)
|
||||
metadata['publisher'] = html.escape(self.config.epub_publisher)
|
||||
metadata['copyright'] = html.escape(self.config.epub_copyright)
|
||||
metadata['scheme'] = html.escape(self.config.epub_scheme)
|
||||
metadata['id'] = html.escape(self.config.epub_identifier)
|
||||
metadata['date'] = html.escape(format_date("%Y-%m-%d"))
|
||||
metadata['manifest_items'] = []
|
||||
metadata['spines'] = []
|
||||
metadata['guides'] = []
|
||||
@ -566,9 +569,9 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
type='epub', subtype='unknown_project_files')
|
||||
continue
|
||||
filename = filename.replace(os.sep, '/')
|
||||
item = ManifestItem(self.esc(filename),
|
||||
self.esc(self.make_id(filename)),
|
||||
self.esc(self.media_types[ext]))
|
||||
item = ManifestItem(html.escape(filename),
|
||||
html.escape(self.make_id(filename)),
|
||||
html.escape(self.media_types[ext]))
|
||||
metadata['manifest_items'].append(item)
|
||||
self.files.append(filename)
|
||||
|
||||
@ -579,21 +582,21 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
continue
|
||||
if refnode['refuri'] in self.ignored_files:
|
||||
continue
|
||||
spine = Spine(self.esc(self.make_id(refnode['refuri'])), True)
|
||||
spine = Spine(html.escape(self.make_id(refnode['refuri'])), True)
|
||||
metadata['spines'].append(spine)
|
||||
spinefiles.add(refnode['refuri'])
|
||||
for info in self.domain_indices:
|
||||
spine = Spine(self.esc(self.make_id(info[0] + self.out_suffix)), True)
|
||||
spine = Spine(html.escape(self.make_id(info[0] + self.out_suffix)), True)
|
||||
metadata['spines'].append(spine)
|
||||
spinefiles.add(info[0] + self.out_suffix)
|
||||
if self.use_index:
|
||||
spine = Spine(self.esc(self.make_id('genindex' + self.out_suffix)), True)
|
||||
spine = Spine(html.escape(self.make_id('genindex' + self.out_suffix)), True)
|
||||
metadata['spines'].append(spine)
|
||||
spinefiles.add('genindex' + self.out_suffix)
|
||||
# add auto generated files
|
||||
for name in self.files:
|
||||
if name not in spinefiles and name.endswith(self.out_suffix):
|
||||
spine = Spine(self.esc(self.make_id(name)), False)
|
||||
spine = Spine(html.escape(self.make_id(name)), False)
|
||||
metadata['spines'].append(spine)
|
||||
|
||||
# add the optional cover
|
||||
@ -601,18 +604,18 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
if self.config.epub_cover:
|
||||
image, html_tmpl = self.config.epub_cover
|
||||
image = image.replace(os.sep, '/')
|
||||
metadata['cover'] = self.esc(self.make_id(image))
|
||||
metadata['cover'] = html.escape(self.make_id(image))
|
||||
if html_tmpl:
|
||||
spine = Spine(self.esc(self.make_id(self.coverpage_name)), True)
|
||||
spine = Spine(html.escape(self.make_id(self.coverpage_name)), True)
|
||||
metadata['spines'].insert(0, spine)
|
||||
if self.coverpage_name not in self.files:
|
||||
ext = path.splitext(self.coverpage_name)[-1]
|
||||
self.files.append(self.coverpage_name)
|
||||
item = ManifestItem(self.esc(self.coverpage_name),
|
||||
self.esc(self.make_id(self.coverpage_name)),
|
||||
self.esc(self.media_types[ext]))
|
||||
item = ManifestItem(html.escape(self.coverpage_name),
|
||||
html.escape(self.make_id(self.coverpage_name)),
|
||||
html.escape(self.media_types[ext]))
|
||||
metadata['manifest_items'].append(item)
|
||||
ctx = {'image': self.esc(image), 'title': self.config.project}
|
||||
ctx = {'image': html.escape(image), 'title': self.config.project}
|
||||
self.handle_page(
|
||||
path.splitext(self.coverpage_name)[0], ctx, html_tmpl)
|
||||
spinefiles.add(self.coverpage_name)
|
||||
@ -628,17 +631,17 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
auto_add_cover = False
|
||||
if type == 'toc':
|
||||
auto_add_toc = False
|
||||
metadata['guides'].append(Guide(self.esc(type),
|
||||
self.esc(title),
|
||||
self.esc(uri)))
|
||||
metadata['guides'].append(Guide(html.escape(type),
|
||||
html.escape(title),
|
||||
html.escape(uri)))
|
||||
if auto_add_cover and html_tmpl:
|
||||
metadata['guides'].append(Guide('cover',
|
||||
self.guide_titles['cover'],
|
||||
self.esc(self.coverpage_name)))
|
||||
html.escape(self.coverpage_name)))
|
||||
if auto_add_toc and self.refnodes:
|
||||
metadata['guides'].append(Guide('toc',
|
||||
self.guide_titles['toc'],
|
||||
self.esc(self.refnodes[0]['refuri'])))
|
||||
html.escape(self.refnodes[0]['refuri'])))
|
||||
|
||||
# write the project file
|
||||
copy_asset_file(path.join(self.template_dir, 'content.opf_t'),
|
||||
@ -707,7 +710,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
"""
|
||||
metadata = {} # type: Dict[str, Any]
|
||||
metadata['uid'] = self.config.epub_uid
|
||||
metadata['title'] = self.esc(self.config.epub_title)
|
||||
metadata['title'] = html.escape(self.config.epub_title)
|
||||
metadata['level'] = level
|
||||
metadata['navpoints'] = navpoints
|
||||
return metadata
|
||||
|
@ -11,6 +11,7 @@
|
||||
from os import path
|
||||
|
||||
from sphinx.builders.html import StandaloneHTMLBuilder
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.osutil import SEP, os_path
|
||||
|
||||
@ -55,6 +56,14 @@ class DirectoryHTMLBuilder(StandaloneHTMLBuilder):
|
||||
self.globalcontext['no_search_suffix'] = True
|
||||
|
||||
|
||||
# for compatibility
|
||||
deprecated_alias('sphinx.builders.html',
|
||||
{
|
||||
'DirectoryHTMLBuilder': DirectoryHTMLBuilder,
|
||||
},
|
||||
RemovedInSphinx40Warning)
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
app.setup_extension('sphinx.builders.html')
|
||||
|
@ -9,6 +9,7 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import html
|
||||
import warnings
|
||||
from collections import namedtuple
|
||||
from os import path
|
||||
@ -98,12 +99,12 @@ class Epub3Builder(_epub_base.EpubBuilder):
|
||||
writing_mode = self.config.epub_writing_mode
|
||||
|
||||
metadata = super().content_metadata()
|
||||
metadata['description'] = self.esc(self.config.epub_description)
|
||||
metadata['contributor'] = self.esc(self.config.epub_contributor)
|
||||
metadata['description'] = html.escape(self.config.epub_description)
|
||||
metadata['contributor'] = html.escape(self.config.epub_contributor)
|
||||
metadata['page_progression_direction'] = PAGE_PROGRESSION_DIRECTIONS.get(writing_mode)
|
||||
metadata['ibook_scroll_axis'] = IBOOK_SCROLL_AXIS.get(writing_mode)
|
||||
metadata['date'] = self.esc(format_date("%Y-%m-%dT%H:%M:%SZ"))
|
||||
metadata['version'] = self.esc(self.config.version)
|
||||
metadata['date'] = html.escape(format_date("%Y-%m-%dT%H:%M:%SZ"))
|
||||
metadata['version'] = html.escape(self.config.version)
|
||||
metadata['epub_version'] = self.config.epub_version
|
||||
return metadata
|
||||
|
||||
@ -166,8 +167,8 @@ class Epub3Builder(_epub_base.EpubBuilder):
|
||||
properly escaped.
|
||||
"""
|
||||
metadata = {} # type: Dict
|
||||
metadata['lang'] = self.esc(self.config.epub_language)
|
||||
metadata['toc_locale'] = self.esc(self.guide_titles['toc'])
|
||||
metadata['lang'] = html.escape(self.config.epub_language)
|
||||
metadata['toc_locale'] = html.escape(self.guide_titles['toc'])
|
||||
metadata['navlist'] = navlist
|
||||
return metadata
|
||||
|
||||
|
@ -24,7 +24,7 @@ from docutils.utils import relative_path
|
||||
|
||||
from sphinx import package_dir, __display_version__
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||
from sphinx.environment.adapters.asset import ImageAdapter
|
||||
from sphinx.environment.adapters.indexentries import IndexEntries
|
||||
from sphinx.environment.adapters.toctree import TocTree
|
||||
@ -653,7 +653,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
def gen_additional_pages(self):
|
||||
# type: () -> None
|
||||
# pages from extensions
|
||||
for pagelist in self.app.emit('html-collect-pages'):
|
||||
for pagelist in self.events.emit('html-collect-pages'):
|
||||
for pagename, context, template in pagelist:
|
||||
self.handle_page(pagename, context, template)
|
||||
|
||||
@ -1187,23 +1187,9 @@ def validate_math_renderer(app):
|
||||
|
||||
|
||||
# for compatibility
|
||||
from sphinx.builders.dirhtml import DirectoryHTMLBuilder # NOQA
|
||||
from sphinx.builders.singlehtml import SingleFileHTMLBuilder # NOQA
|
||||
from sphinxcontrib.serializinghtml import ( # NOQA
|
||||
LAST_BUILD_FILENAME, JSONHTMLBuilder, PickleHTMLBuilder, SerializingHTMLBuilder
|
||||
)
|
||||
|
||||
deprecated_alias('sphinx.builders.html',
|
||||
{
|
||||
'LAST_BUILD_FILENAME': LAST_BUILD_FILENAME,
|
||||
'DirectoryHTMLBuilder': DirectoryHTMLBuilder,
|
||||
'JSONHTMLBuilder': JSONHTMLBuilder,
|
||||
'PickleHTMLBuilder': PickleHTMLBuilder,
|
||||
'SerializingHTMLBuilder': SerializingHTMLBuilder,
|
||||
'SingleFileHTMLBuilder': SingleFileHTMLBuilder,
|
||||
'WebHTMLBuilder': PickleHTMLBuilder,
|
||||
},
|
||||
RemovedInSphinx40Warning)
|
||||
import sphinx.builders.dirhtml # NOQA
|
||||
import sphinx.builders.singlehtml # NOQA
|
||||
import sphinxcontrib.serializinghtml # NOQA
|
||||
|
||||
|
||||
def setup(app):
|
||||
|
@ -24,7 +24,7 @@ if False:
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
|
||||
|
||||
deprecated_alias('sphinx.builders.devhelp',
|
||||
deprecated_alias('sphinx.builders.htmlhelp',
|
||||
{
|
||||
'chm_locales': chm_locales,
|
||||
'chm_htmlescape': chm_htmlescape,
|
||||
|
@ -208,7 +208,7 @@ class LaTeXBuilder(Builder):
|
||||
self.context['indexname'] = _('Index')
|
||||
if self.config.release:
|
||||
# Show the release label only if release value exists
|
||||
self.context['releasename'] = _('Release')
|
||||
self.context.setdefault('releasename', _('Release'))
|
||||
|
||||
def init_babel(self):
|
||||
# type: () -> None
|
||||
|
@ -16,6 +16,7 @@ from sphinx import addnodes
|
||||
from sphinx.builders.latex.nodes import (
|
||||
captioned_literal_block, footnotemark, footnotetext, math_reference, thebibliography
|
||||
)
|
||||
from sphinx.domains.citation import CitationDomain
|
||||
from sphinx.transforms import SphinxTransform
|
||||
from sphinx.transforms.post_transforms import SphinxPostTransform
|
||||
from sphinx.util.nodes import NodeMatcher
|
||||
@ -545,10 +546,10 @@ class CitationReferenceTransform(SphinxPostTransform):
|
||||
|
||||
def run(self, **kwargs):
|
||||
# type: (Any) -> None
|
||||
matcher = NodeMatcher(addnodes.pending_xref, refdomain='std', reftype='citation')
|
||||
citations = self.env.get_domain('std').data['citations']
|
||||
domain = cast(CitationDomain, self.env.get_domain('citation'))
|
||||
matcher = NodeMatcher(addnodes.pending_xref, refdomain='citation', reftype='ref')
|
||||
for node in self.document.traverse(matcher): # type: addnodes.pending_xref
|
||||
docname, labelid, _ = citations.get(node['reftarget'], ('', '', 0))
|
||||
docname, labelid, _ = domain.citations.get(node['reftarget'], ('', '', 0))
|
||||
if docname:
|
||||
citation_ref = nodes.citation_reference('', '', *node.children,
|
||||
docname=docname, refname=labelid)
|
||||
|
@ -13,6 +13,7 @@ from os import path
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx.builders.html import StandaloneHTMLBuilder
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
|
||||
from sphinx.environment.adapters.toctree import TocTree
|
||||
from sphinx.locale import __
|
||||
from sphinx.util import logging
|
||||
@ -201,6 +202,14 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
|
||||
self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)
|
||||
|
||||
|
||||
# for compatibility
|
||||
deprecated_alias('sphinx.builders.html',
|
||||
{
|
||||
'SingleFileHTMLBuilder': SingleFileHTMLBuilder,
|
||||
},
|
||||
RemovedInSphinx40Warning)
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
app.setup_extension('sphinx.builders.html')
|
||||
|
@ -72,11 +72,19 @@ class Make:
|
||||
|
||||
def build_clean(self):
|
||||
# type: () -> int
|
||||
srcdir = path.abspath(self.srcdir)
|
||||
builddir = path.abspath(self.builddir)
|
||||
if not path.exists(self.builddir):
|
||||
return 0
|
||||
elif not path.isdir(self.builddir):
|
||||
print("Error: %r is not a directory!" % self.builddir)
|
||||
return 1
|
||||
elif srcdir == builddir:
|
||||
print("Error: %r is same as source directory!" % self.builddir)
|
||||
return 1
|
||||
elif path.commonpath([srcdir, builddir]) == builddir:
|
||||
print("Error: %r directory contains source directory!" % self.builddir)
|
||||
return 1
|
||||
print("Removing everything under %r..." % self.builddir)
|
||||
for item in os.listdir(self.builddir):
|
||||
rmtree(self.builddir_join(item))
|
||||
|
@ -166,7 +166,7 @@ class ObjectDescription(SphinxDirective):
|
||||
node['objtype'] = node['desctype'] = self.objtype
|
||||
node['noindex'] = noindex = ('noindex' in self.options)
|
||||
|
||||
self.names = [] # type: List[str]
|
||||
self.names = [] # type: List[Any]
|
||||
signatures = self.get_signatures()
|
||||
for i, sig in enumerate(signatures):
|
||||
# add a signature node for each signature in the current unit
|
||||
|
sphinx/domains/citation.py (new file, 167 lines)
@ -0,0 +1,167 @@
|
||||
"""
|
||||
sphinx.domains.citation
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The citation domain.
|
||||
|
||||
:copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from typing import cast
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.domains import Domain
|
||||
from sphinx.locale import __
|
||||
from sphinx.transforms import SphinxTransform
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.nodes import copy_source_info, make_refnode
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict, List, Set, Tuple, Union # NOQA
from sphinx.application import Sphinx # NOQA
from sphinx.builders import Builder # NOQA
from sphinx.environment import BuildEnvironment # NOQA


logger = logging.getLogger(__name__)


class CitationDomain(Domain):
    """Domain for citations."""

    name = 'citation'
    label = 'citation'

    dangling_warnings = {
        'ref': 'citation not found: %(target)s',
    }

    @property
    def citations(self):
        # type: () -> Dict[str, Tuple[str, str, int]]
        return self.data.setdefault('citations', {})

    @property
    def citation_refs(self):
        # type: () -> Dict[str, Set[str]]
        return self.data.setdefault('citation_refs', {})

    def clear_doc(self, docname):
        # type: (str) -> None
        for key, (fn, _l, lineno) in list(self.citations.items()):
            if fn == docname:
                del self.citations[key]
        for key, docnames in list(self.citation_refs.items()):
            if docnames == {docname}:
                del self.citation_refs[key]
            elif docname in docnames:
                docnames.remove(docname)

    def merge_domaindata(self, docnames, otherdata):
        # type: (List[str], Dict) -> None
        # XXX duplicates?
        for key, data in otherdata['citations'].items():
            if data[0] in docnames:
                self.citations[key] = data
        for key, data in otherdata['citation_refs'].items():
            citation_refs = self.citation_refs.setdefault(key, set())
            for docname in data:
                if docname in docnames:
                    citation_refs.add(docname)

    def note_citation(self, node):
        # type: (nodes.citation) -> None
        label = node[0].astext()
        if label in self.citations:
            path = self.env.doc2path(self.citations[label][0])
            logger.warning(__('duplicate citation %s, other instance in %s'), label, path,
                           location=node, type='ref', subtype='citation')
        self.citations[label] = (node['docname'], node['ids'][0], node.line)

    def note_citation_reference(self, node):
        # type: (addnodes.pending_xref) -> None
        docnames = self.citation_refs.setdefault(node['reftarget'], set())
        docnames.add(self.env.docname)

    def check_consistency(self):
        # type: () -> None
        for name, (docname, labelid, lineno) in self.citations.items():
            if name not in self.citation_refs:
                logger.warning(__('Citation [%s] is not referenced.'), name,
                               type='ref', subtype='citation', location=(docname, lineno))

    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        # type: (BuildEnvironment, str, Builder, str, str, addnodes.pending_xref, nodes.Element) -> nodes.Element # NOQA
        docname, labelid, lineno = self.citations.get(target, ('', '', 0))
        if not docname:
            return None

        return make_refnode(builder, fromdocname, docname,
                            labelid, contnode)

    def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
        # type: (BuildEnvironment, str, Builder, str, addnodes.pending_xref, nodes.Element) -> List[Tuple[str, nodes.Element]] # NOQA
        refnode = self.resolve_xref(env, fromdocname, builder, 'ref', target, node, contnode)
        if refnode is None:
            return []
        else:
            return [('ref', refnode)]


class CitationDefinitionTransform(SphinxTransform):
    """Mark citation definition labels as not smartquoted."""
    default_priority = 619

    def apply(self, **kwargs):
        # type: (Any) -> None
        domain = cast(CitationDomain, self.env.get_domain('citation'))
        for node in self.document.traverse(nodes.citation):
            # register citation node to domain
            node['docname'] = self.env.docname
            domain.note_citation(node)

            # mark citation labels as not smartquoted
            label = cast(nodes.label, node[0])
            label['support_smartquotes'] = False


class CitationReferenceTransform(SphinxTransform):
    """
    Replace citation references by pending_xref nodes before the default
    docutils transform tries to resolve them.
    """
    default_priority = 619

    def apply(self, **kwargs):
        # type: (Any) -> None
        domain = cast(CitationDomain, self.env.get_domain('citation'))
        for node in self.document.traverse(nodes.citation_reference):
            target = node.astext()
            ref = addnodes.pending_xref(target, refdomain='citation', reftype='ref',
                                        reftarget=target, refwarn=True,
                                        support_smartquotes=False,
                                        ids=node["ids"],
                                        classes=node.get('classes', []))
            ref += nodes.inline(target, '[%s]' % target)
            copy_source_info(node, ref)
            node.replace_self(ref)

            # register reference node to domain
            domain.note_citation_reference(ref)


def setup(app):
    # type: (Sphinx) -> Dict[str, Any]
    app.add_domain(CitationDomain)
    app.add_transform(CitationDefinitionTransform)
    app.add_transform(CitationReferenceTransform)

    return {
        'version': 'builtin',
        'env_version': 1,
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
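The standalone citation domain above keeps its data in the typed mappings ``citations`` and ``citation_refs``. As a rough sketch (not part of this commit; the handler name and the printed format are assumptions), an extension could inspect that data when the consistency checks run::

    def report_citations(app, env):
        # CitationDomain is registered by the setup() above
        domain = env.get_domain('citation')
        for label, (docname, node_id, lineno) in domain.citations.items():
            refs = domain.citation_refs.get(label, set())
            print('[%s] defined in %s (line %s), referenced from %d document(s)'
                  % (label, docname, lineno, len(refs)))

    def setup(app):
        app.connect('env-check-consistency', report_citations)
        return {'parallel_read_safe': True}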
@ -6391,6 +6391,7 @@ class DefinitionParser:
# if there are '()' left, just skip them
self.skip_ws()
self.skip_string('()')
self.assert_end()
templatePrefix = self._check_template_consistency(name, templatePrefix,
fullSpecShorthand=True)
res1 = ASTNamespace(name, templatePrefix)
@ -6403,6 +6404,7 @@ class DefinitionParser:
# if there are '()' left, just skip them
self.skip_ws()
self.skip_string('()')
self.assert_end()
return res2, False
except DefinitionError as e2:
errs = []
@ -7145,7 +7147,6 @@ class CPPDomain(Domain):
parser = DefinitionParser(target, warner, env.config)
try:
ast, isShorthand = parser.parse_xref_object()
parser.assert_end()
except DefinitionError as e:
def findWarning(e): # as arg to stop flake8 from complaining
if typ != 'any' and typ != 'func':
@ -7154,7 +7155,6 @@ class CPPDomain(Domain):
parser2 = DefinitionParser(target[:-2], warner, env.config)
try:
parser2.parse_xref_object()
parser2.assert_end()
except DefinitionError as e2:
return target[:-2], e2
# strange, that we don't get the error now, use the original
@ -9,11 +9,14 @@
"""

import re
import warnings
from typing import cast

from docutils import nodes
from docutils.parsers.rst import directives

from sphinx import addnodes
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType, Index, IndexEntry
from sphinx.locale import _, __
@ -309,14 +312,13 @@ class PyObject(ObjectDescription):
return fullname, prefix

def get_index_text(self, modname, name):
# type: (str, str) -> str
# type: (str, Tuple[str, str]) -> str
"""Return the text for the index entry of the object."""
raise NotImplementedError('must be implemented in subclasses')

def add_target_and_index(self, name_cls, sig, signode):
# type: (str, str, addnodes.desc_signature) -> None
modname = self.options.get(
'module', self.env.ref_context.get('py:module'))
# type: (Tuple[str, str], str, addnodes.desc_signature) -> None
modname = self.options.get('module', self.env.ref_context.get('py:module'))
fullname = (modname and modname + '.' or '') + name_cls[0]
# note target
if fullname not in self.state.document.ids:
@ -324,15 +326,9 @@ class PyObject(ObjectDescription):
signode['ids'].append(fullname)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)
objects = self.env.domaindata['py']['objects']
if fullname in objects:
self.state_machine.reporter.warning(
'duplicate object description of %s, ' % fullname +
'other instance in ' +
self.env.doc2path(objects[fullname][0]) +
', use :noindex: for one of them',
line=self.lineno)
objects[fullname] = (self.env.docname, self.objtype)

domain = cast(PythonDomain, self.env.get_domain('py'))
domain.note_object(fullname, self.objtype)

indextext = self.get_index_text(modname, name_cls)
if indextext:
@ -405,12 +401,19 @@ class PyModulelevel(PyObject):
Description of an object on module level (functions, data).
"""

def run(self):
# type: () -> List[nodes.Node]
warnings.warn('PyModulelevel is deprecated.',
RemovedInSphinx40Warning)

return super().run()

def needs_arglist(self):
# type: () -> bool
return self.objtype == 'function'

def get_index_text(self, modname, name_cls):
# type: (str, str) -> str
# type: (str, Tuple[str, str]) -> str
if self.objtype == 'function':
if not modname:
return _('%s() (built-in function)') % name_cls[0]
@ -423,6 +426,46 @@ class PyModulelevel(PyObject):
return ''

class PyFunction(PyObject):
"""Description of a function."""

option_spec = PyObject.option_spec.copy()
option_spec.update({
'async': directives.flag,
})

def get_signature_prefix(self, sig):
# type: (str) -> str
if 'async' in self.options:
return 'async '
else:
return ''

def needs_arglist(self):
# type: () -> bool
return True

def get_index_text(self, modname, name_cls):
# type: (str, Tuple[str, str]) -> str
name, cls = name_cls
if modname:
return _('%s() (in module %s)') % (name, modname)
else:
return _('%s() (built-in function)') % name


class PyVariable(PyObject):
"""Description of a variable."""

def get_index_text(self, modname, name_cls):
# type: (str, Tuple[str, str]) -> str
name, cls = name_cls
if modname:
return _('%s (in module %s)') % (name, modname)
else:
return _('%s (built-in variable)') % name


class PyClasslike(PyObject):
"""
Description of a class-like object (classes, interfaces, exceptions).
@ -435,7 +478,7 @@ class PyClasslike(PyObject):
return self.objtype + ' '

def get_index_text(self, modname, name_cls):
# type: (str, str) -> str
# type: (str, Tuple[str, str]) -> str
if self.objtype == 'class':
if not modname:
return _('%s (built-in class)') % name_cls[0]
@ -451,6 +494,13 @@ class PyClassmember(PyObject):
Description of a class member (methods, attributes).
"""

def run(self):
# type: () -> List[nodes.Node]
warnings.warn('PyClassmember is deprecated.',
RemovedInSphinx40Warning)

return super().run()

def needs_arglist(self):
# type: () -> bool
return self.objtype.endswith('method')
@ -464,7 +514,7 @@ class PyClassmember(PyObject):
return ''

def get_index_text(self, modname, name_cls):
# type: (str, str) -> str
# type: (str, Tuple[str, str]) -> str
name, cls = name_cls
add_modules = self.env.config.add_module_names
if self.objtype == 'method':
@ -521,6 +571,109 @@ class PyClassmember(PyObject):
return ''


class PyMethod(PyObject):
"""Description of a method."""

option_spec = PyObject.option_spec.copy()
option_spec.update({
'async': directives.flag,
'classmethod': directives.flag,
'property': directives.flag,
'staticmethod': directives.flag,
})

def needs_arglist(self):
# type: () -> bool
if 'property' in self.options:
return False
else:
return True

def get_signature_prefix(self, sig):
# type: (str) -> str
prefix = []
if 'async' in self.options:
prefix.append('async')
if 'classmethod' in self.options:
prefix.append('classmethod')
if 'property' in self.options:
prefix.append('property')
if 'staticmethod' in self.options:
prefix.append('static')

if prefix:
return ' '.join(prefix) + ' '
else:
return ''

def get_index_text(self, modname, name_cls):
# type: (str, Tuple[str, str]) -> str
name, cls = name_cls
try:
clsname, methname = name.rsplit('.', 1)
if modname and self.env.config.add_module_names:
clsname = '.'.join([modname, clsname])
except ValueError:
if modname:
return _('%s() (in module %s)') % (name, modname)
else:
return '%s()' % name

if 'classmethod' in self.options:
return _('%s() (%s class method)') % (methname, clsname)
elif 'property' in self.options:
return _('%s() (%s property)') % (methname, clsname)
elif 'staticmethod' in self.options:
return _('%s() (%s static method)') % (methname, clsname)
else:
return _('%s() (%s method)') % (methname, clsname)

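The four flag options of the new ``PyMethod`` directive combine into the rendered signature prefix. A standalone mirror of that logic, for illustration only (not the Sphinx API itself)::

    def signature_prefix(options):
        # same ordering as PyMethod.get_signature_prefix() above
        prefix = [word for word in ('async', 'classmethod', 'property')
                  if word in options]
        if 'staticmethod' in options:
            prefix.append('static')
        return ' '.join(prefix) + ' ' if prefix else ''

    assert signature_prefix({'async', 'classmethod'}) == 'async classmethod '
    assert signature_prefix(set()) == ''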
class PyClassMethod(PyMethod):
"""Description of a classmethod."""

option_spec = PyObject.option_spec.copy()

def run(self):
# type: () -> List[nodes.Node]
self.name = 'py:method'
self.options['classmethod'] = True

return super().run()


class PyStaticMethod(PyMethod):
"""Description of a staticmethod."""

option_spec = PyObject.option_spec.copy()

def run(self):
# type: () -> List[nodes.Node]
self.name = 'py:method'
self.options['staticmethod'] = True

return super().run()


class PyAttribute(PyObject):
"""Description of an attribute."""

def get_index_text(self, modname, name_cls):
# type: (str, Tuple[str, str]) -> str
name, cls = name_cls
try:
clsname, attrname = name.rsplit('.', 1)
if modname and self.env.config.add_module_names:
clsname = '.'.join([modname, clsname])
except ValueError:
if modname:
return _('%s (in module %s)') % (name, modname)
else:
return name

return _('%s (%s attribute)') % (attrname, clsname)


class PyDecoratorMixin:
"""
Mixin for decorator directives.
@ -575,18 +728,20 @@ class PyModule(SphinxDirective):

def run(self):
# type: () -> List[nodes.Node]
domain = cast(PythonDomain, self.env.get_domain('py'))

modname = self.arguments[0].strip()
noindex = 'noindex' in self.options
self.env.ref_context['py:module'] = modname
ret = [] # type: List[nodes.Node]
if not noindex:
self.env.domaindata['py']['modules'][modname] = (self.env.docname,
self.options.get('synopsis', ''),
self.options.get('platform', ''),
'deprecated' in self.options)
# make a duplicate entry in 'objects' to facilitate searching for
# the module in PythonDomain.find_obj()
self.env.domaindata['py']['objects'][modname] = (self.env.docname, 'module')
# note module to the domain
domain.note_module(modname,
self.options.get('synopsis', ''),
self.options.get('platform', ''),
'deprecated' in self.options)
domain.note_object(modname, 'module')

targetnode = nodes.target('', '', ids=['module-' + modname],
ismod=True)
self.state.document.note_explicit_target(targetnode)
@ -737,14 +892,14 @@ class PythonDomain(Domain):
} # type: Dict[str, ObjType]

directives = {
'function': PyModulelevel,
'data': PyModulelevel,
'function': PyFunction,
'data': PyVariable,
'class': PyClasslike,
'exception': PyClasslike,
'method': PyClassmember,
'classmethod': PyClassmember,
'staticmethod': PyClassmember,
'attribute': PyClassmember,
'method': PyMethod,
'classmethod': PyClassMethod,
'staticmethod': PyStaticMethod,
'attribute': PyAttribute,
'module': PyModule,
'currentmodule': PyCurrentModule,
'decorator': PyDecoratorFunction,
@ -769,24 +924,55 @@ class PythonDomain(Domain):
PythonModuleIndex,
]

@property
def objects(self):
# type: () -> Dict[str, Tuple[str, str]]
return self.data.setdefault('objects', {}) # fullname -> docname, objtype

def note_object(self, name, objtype, location=None):
# type: (str, str, Any) -> None
"""Note a python object for cross reference.

.. versionadded:: 2.1
"""
if name in self.objects:
docname = self.objects[name][0]
logger.warning(__('duplicate object description of %s, '
'other instance in %s, use :noindex: for one of them'),
name, docname, location=location)
self.objects[name] = (self.env.docname, objtype)

@property
def modules(self):
# type: () -> Dict[str, Tuple[str, str, str, bool]]
return self.data.setdefault('modules', {}) # modname -> docname, synopsis, platform, deprecated # NOQA

def note_module(self, name, synopsis, platform, deprecated):
# type: (str, str, str, bool) -> None
"""Note a python module for cross reference.

.. versionadded:: 2.1
"""
self.modules[name] = (self.env.docname, synopsis, platform, deprecated)

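``note_object()`` and ``note_module()`` give extensions a public way to register entries instead of writing to ``env.domaindata['py']`` directly. A hypothetical directive using the new helper (the directive name and behaviour are assumptions, not part of this commit)::

    from sphinx.util.docutils import SphinxDirective

    class PyExternalFunction(SphinxDirective):
        """Record a function for cross-referencing without describing it."""
        required_arguments = 1

        def run(self):
            domain = self.env.get_domain('py')
            domain.note_object(self.arguments[0], 'function',
                               location=(self.env.docname, self.lineno))
            return []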
def clear_doc(self, docname):
# type: (str) -> None
for fullname, (fn, _l) in list(self.data['objects'].items()):
for fullname, (fn, _l) in list(self.objects.items()):
if fn == docname:
del self.data['objects'][fullname]
for modname, (fn, _x, _x, _x) in list(self.data['modules'].items()):
del self.objects[fullname]
for modname, (fn, _x, _x, _y) in list(self.modules.items()):
if fn == docname:
del self.data['modules'][modname]
del self.modules[modname]

def merge_domaindata(self, docnames, otherdata):
# type: (List[str], Dict) -> None
# XXX check duplicates?
for fullname, (fn, objtype) in otherdata['objects'].items():
if fn in docnames:
self.data['objects'][fullname] = (fn, objtype)
self.objects[fullname] = (fn, objtype)
for modname, data in otherdata['modules'].items():
if data[0] in docnames:
self.data['modules'][modname] = data
self.modules[modname] = data

def find_obj(self, env, modname, classname, name, type, searchmode=0):
# type: (BuildEnvironment, str, str, str, str, int) -> List[Tuple[str, Any]]
@ -800,7 +986,6 @@ class PythonDomain(Domain):
if not name:
return []

objects = self.data['objects']
matches = [] # type: List[Tuple[str, Any]]

newname = None
@ -812,44 +997,44 @@ class PythonDomain(Domain):
if objtypes is not None:
if modname and classname:
fullname = modname + '.' + classname + '.' + name
if fullname in objects and objects[fullname][1] in objtypes:
if fullname in self.objects and self.objects[fullname][1] in objtypes:
newname = fullname
if not newname:
if modname and modname + '.' + name in objects and \
objects[modname + '.' + name][1] in objtypes:
if modname and modname + '.' + name in self.objects and \
self.objects[modname + '.' + name][1] in objtypes:
newname = modname + '.' + name
elif name in objects and objects[name][1] in objtypes:
elif name in self.objects and self.objects[name][1] in objtypes:
newname = name
else:
# "fuzzy" searching mode
searchname = '.' + name
matches = [(oname, objects[oname]) for oname in objects
matches = [(oname, self.objects[oname]) for oname in self.objects
if oname.endswith(searchname) and
objects[oname][1] in objtypes]
self.objects[oname][1] in objtypes]
else:
# NOTE: searching for exact match, object type is not considered
if name in objects:
if name in self.objects:
newname = name
elif type == 'mod':
# only exact matches allowed for modules
return []
elif classname and classname + '.' + name in objects:
elif classname and classname + '.' + name in self.objects:
newname = classname + '.' + name
elif modname and modname + '.' + name in objects:
elif modname and modname + '.' + name in self.objects:
newname = modname + '.' + name
elif modname and classname and \
modname + '.' + classname + '.' + name in objects:
modname + '.' + classname + '.' + name in self.objects:
newname = modname + '.' + classname + '.' + name
# special case: builtin exceptions have module "exceptions" set
elif type == 'exc' and '.' not in name and \
'exceptions.' + name in objects:
'exceptions.' + name in self.objects:
newname = 'exceptions.' + name
# special case: object methods
elif type in ('func', 'meth') and '.' not in name and \
'object.' + name in objects:
'object.' + name in self.objects:
newname = 'object.' + name
if newname is not None:
matches.append((newname, objects[newname]))
matches.append((newname, self.objects[newname]))
return matches

def resolve_xref(self, env, fromdocname, builder,
@ -896,7 +1081,7 @@ class PythonDomain(Domain):
def _make_module_refnode(self, builder, fromdocname, name, contnode):
# type: (Builder, str, str, nodes.Node) -> nodes.Element
# get additional info for modules
docname, synopsis, platform, deprecated = self.data['modules'][name]
docname, synopsis, platform, deprecated = self.modules[name]
title = name
if synopsis:
title += ': ' + synopsis
@ -909,9 +1094,9 @@ class PythonDomain(Domain):

def get_objects(self):
# type: () -> Iterator[Tuple[str, str, str, str, str, int]]
for modname, info in self.data['modules'].items():
for modname, info in self.modules.items():
yield (modname, modname, 'module', info[0], 'module-' + modname, 0)
for refname, (docname, type) in self.data['objects'].items():
for refname, (docname, type) in self.objects.items():
if type != 'module': # modules are already handled
yield (refname, refname, type, docname, refname, 1)
@ -9,12 +9,16 @@
"""

import re
from typing import cast

from docutils.parsers.rst import directives

from sphinx import addnodes
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.locale import _
from sphinx.locale import _, __
from sphinx.roles import XRefRole
from sphinx.util import logging
from sphinx.util.nodes import make_refnode

if False:
@ -26,6 +30,8 @@ if False:
from sphinx.environment import BuildEnvironment # NOQA


logger = logging.getLogger(__name__)

dir_sig_re = re.compile(r'\.\. (.+?)::(.*)$')


@ -43,14 +49,9 @@ class ReSTMarkup(ObjectDescription):
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)

objects = self.env.domaindata['rst']['objects']
key = (self.objtype, name)
if key in objects:
self.state_machine.reporter.warning(
'duplicate description of %s %s, ' % (self.objtype, name) +
'other instance in ' + self.env.doc2path(objects[key]),
line=self.lineno)
objects[key] = self.env.docname
domain = cast(ReSTDomain, self.env.get_domain('rst'))
domain.note_object(self.objtype, name, location=(self.env.docname, self.lineno))

indextext = self.get_index_text(self.objtype, name)
if indextext:
self.indexnode['entries'].append(('single', indextext,
@ -58,10 +59,6 @@ class ReSTMarkup(ObjectDescription):

def get_index_text(self, objectname, name):
# type: (str, str) -> str
if self.objtype == 'directive':
return _('%s (directive)') % name
elif self.objtype == 'role':
return _('%s (role)') % name
return ''


@ -80,7 +77,10 @@ def parse_directive(d):
if not m:
return (dir, '')
parsed_dir, parsed_args = m.groups()
return (parsed_dir.strip(), ' ' + parsed_args.strip())
if parsed_args.strip():
return (parsed_dir.strip(), ' ' + parsed_args.strip())
else:
return (parsed_dir.strip(), '')


class ReSTDirective(ReSTMarkup):
@ -96,6 +96,78 @@ class ReSTDirective(ReSTMarkup):
signode += addnodes.desc_addname(args, args)
return name

def get_index_text(self, objectname, name):
# type: (str, str) -> str
return _('%s (directive)') % name

def before_content(self):
# type: () -> None
if self.names:
directives = self.env.ref_context.setdefault('rst:directives', [])
directives.append(self.names[0])

def after_content(self):
# type: () -> None
directives = self.env.ref_context.setdefault('rst:directives', [])
if directives:
directives.pop()


class ReSTDirectiveOption(ReSTMarkup):
"""
Description of an option for reST directive.
"""
option_spec = ReSTMarkup.option_spec.copy()
option_spec.update({
'type': directives.unchanged,
})

def handle_signature(self, sig, signode):
# type: (str, addnodes.desc_signature) -> str
try:
name, argument = re.split(r'\s*:\s+', sig.strip(), 1)
except ValueError:
name, argument = sig, None

signode += addnodes.desc_name(':%s:' % name, ':%s:' % name)
if argument:
signode += addnodes.desc_annotation(' ' + argument, ' ' + argument)
if self.options.get('type'):
text = ' (%s)' % self.options['type']
signode += addnodes.desc_annotation(text, text)
return name

def add_target_and_index(self, name, sig, signode):
# type: (str, str, addnodes.desc_signature) -> None
targetname = '-'.join([self.objtype, self.current_directive, name])
if targetname not in self.state.document.ids:
signode['names'].append(targetname)
signode['ids'].append(targetname)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)

domain = cast(ReSTDomain, self.env.get_domain('rst'))
domain.note_object(self.objtype, name, location=(self.env.docname, self.lineno))

if self.current_directive:
key = name[0].upper()
pair = [_('%s (directive)') % self.current_directive,
_(':%s: (directive option)') % name]
self.indexnode['entries'].append(('pair', '; '.join(pair), targetname, '', key))
else:
key = name[0].upper()
text = _(':%s: (directive option)') % name
self.indexnode['entries'].append(('single', text, targetname, '', key))

@property
def current_directive(self):
# type: () -> str
directives = self.env.ref_context.get('rst:directives')
if directives:
return directives[-1]
else:
return ''

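For reference, the anchor generated by ``add_target_and_index()`` above is simply the object type, the enclosing directive name, and the option name joined with hyphens. A tiny standalone illustration (not Sphinx API)::

    objtype = 'directive:option'
    current_directive = 'toctree'      # pushed by ReSTDirective.before_content()
    name = 'maxdepth'
    targetname = '-'.join([objtype, current_directive, name])
    assert targetname == 'directive:option-toctree-maxdepth'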
class ReSTRole(ReSTMarkup):
"""
@ -106,6 +178,10 @@ class ReSTRole(ReSTMarkup):
signode += addnodes.desc_name(':%s:' % sig, ':%s:' % sig)
return sig

def get_index_text(self, objectname, name):
# type: (str, str) -> str
return _('%s (role)') % name


class ReSTDomain(Domain):
"""ReStructuredText domain."""
@ -113,11 +189,13 @@ class ReSTDomain(Domain):
label = 'reStructuredText'

object_types = {
'directive': ObjType(_('directive'), 'dir'),
'role': ObjType(_('role'), 'role'),
'directive': ObjType(_('directive'), 'dir'),
'directive:option': ObjType(_('directive-option'), 'dir'),
'role': ObjType(_('role'), 'role'),
}
directives = {
'directive': ReSTDirective,
'directive:option': ReSTDirectiveOption,
'role': ReSTRole,
}
roles = {
@ -126,42 +204,54 @@ class ReSTDomain(Domain):
}
initial_data = {
'objects': {}, # fullname -> docname, objtype
} # type: Dict[str, Dict[str, Tuple[str, ObjType]]]
} # type: Dict[str, Dict[Tuple[str, str], str]]

@property
def objects(self):
# type: () -> Dict[Tuple[str, str], str]
return self.data.setdefault('objects', {}) # (objtype, fullname) -> docname

def note_object(self, objtype, name, location=None):
# type: (str, str, Any) -> None
if (objtype, name) in self.objects:
docname = self.objects[objtype, name]
logger.warning(__('duplicate description of %s %s, other instance in %s') %
(objtype, name, docname), location=location)

self.objects[objtype, name] = self.env.docname

def clear_doc(self, docname):
# type: (str) -> None
for (typ, name), doc in list(self.data['objects'].items()):
for (typ, name), doc in list(self.objects.items()):
if doc == docname:
del self.data['objects'][typ, name]
del self.objects[typ, name]

def merge_domaindata(self, docnames, otherdata):
# type: (List[str], Dict) -> None
# XXX check duplicates
for (typ, name), doc in otherdata['objects'].items():
if doc in docnames:
self.data['objects'][typ, name] = doc
self.objects[typ, name] = doc

def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
# type: (BuildEnvironment, str, Builder, str, str, addnodes.pending_xref, nodes.Element) -> nodes.Element # NOQA
objects = self.data['objects']
objtypes = self.objtypes_for_role(typ)
for objtype in objtypes:
if (objtype, target) in objects:
return make_refnode(builder, fromdocname,
objects[objtype, target],
todocname = self.objects.get((objtype, target))
if todocname:
return make_refnode(builder, fromdocname, todocname,
objtype + '-' + target,
contnode, target + ' ' + objtype)
return None

def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
# type: (BuildEnvironment, str, Builder, str, addnodes.pending_xref, nodes.Element) -> List[Tuple[str, nodes.Element]] # NOQA
objects = self.data['objects']
results = [] # type: List[Tuple[str, nodes.Element]]
for objtype in self.object_types:
if (objtype, target) in self.data['objects']:
todocname = self.objects.get((objtype, target))
if todocname:
results.append(('rst:' + self.role_for_objtype(objtype),
make_refnode(builder, fromdocname,
objects[objtype, target],
make_refnode(builder, fromdocname, todocname,
objtype + '-' + target,
contnode, target + ' ' + objtype)))
return results
@ -10,6 +10,7 @@

import re
import unicodedata
import warnings
from copy import copy
from typing import cast

@ -18,9 +19,9 @@ from docutils.parsers.rst import directives
from docutils.statemachine import StringList

from sphinx import addnodes
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.errors import NoUri
from sphinx.locale import _, __
from sphinx.roles import XRefRole
from sphinx.util import ws_re, logging, docname_join
@ -255,6 +256,9 @@ def make_glossary_term(env, textnodes, index_key, source, lineno, new_id=None):
termtext = term.astext()
if new_id is None:
new_id = nodes.make_id('term-' + termtext)
if new_id == 'term':
# the term is not good for node_id. Generate it by sequence number instead.
new_id = 'term-' + str(len(gloss_entries))
if new_id in gloss_entries:
new_id = 'term-' + str(len(gloss_entries))
gloss_entries.add(new_id)
@ -300,6 +304,7 @@ class Glossary(SphinxDirective):
# first, collect single entries
entries = [] # type: List[Tuple[List[Tuple[str, str, int]], StringList]]
in_definition = True
in_comment = False
was_empty = True
messages = [] # type: List[nodes.Node]
for line, (source, lineno) in zip(self.content, self.content.items):
@ -313,27 +318,33 @@ class Glossary(SphinxDirective):
if line and not line[0].isspace():
# enable comments
if line.startswith('.. '):
in_comment = True
continue
else:
in_comment = False

# first term of definition
if in_definition:
if not was_empty:
messages.append(self.state.reporter.system_message(
2, 'glossary term must be preceded by empty line',
messages.append(self.state.reporter.warning(
_('glossary term must be preceded by empty line'),
source=source, line=lineno))
entries.append(([(line, source, lineno)], StringList()))
in_definition = False
# second term and following
else:
if was_empty:
messages.append(self.state.reporter.system_message(
2, 'glossary terms must not be separated by empty '
'lines', source=source, line=lineno))
messages.append(self.state.reporter.warning(
_('glossary terms must not be separated by empty lines'),
source=source, line=lineno))
if entries:
entries[-1][0].append((line, source, lineno))
else:
messages.append(self.state.reporter.system_message(
2, 'glossary seems to be misformatted, check '
'indentation', source=source, line=lineno))
messages.append(self.state.reporter.warning(
_('glossary seems to be misformatted, check indentation'),
source=source, line=lineno))
elif in_comment:
pass
else:
if not in_definition:
# first line of definition, determines indentation
@ -342,9 +353,9 @@ class Glossary(SphinxDirective):
if entries:
entries[-1][1].append(line[indent_len:], source, lineno)
else:
messages.append(self.state.reporter.system_message(
2, 'glossary seems to be misformatted, check '
'indentation', source=source, line=lineno))
messages.append(self.state.reporter.warning(
_('glossary seems to be misformatted, check indentation'),
source=source, line=lineno))
was_empty = False

# now, parse all the entries into a big definition list
@ -494,8 +505,6 @@ class StandardDomain(Domain):
initial_data = {
'progoptions': {}, # (program, name) -> docname, labelid
'objects': {}, # (type, name) -> docname, labelid
'citations': {}, # citation_name -> docname, labelid, lineno
'citation_refs': {}, # citation_name -> list of docnames
'labels': { # labelname -> docname, labelid, sectionname
'genindex': ('genindex', '', _('Index')),
'modindex': ('py-modindex', '', _('Module Index')),
@ -516,7 +525,6 @@ class StandardDomain(Domain):
'keyword': 'unknown keyword: %(target)s',
'doc': 'unknown document: %(target)s',
'option': 'unknown option: %(target)s',
'citation': 'citation not found: %(target)s',
}

enumerable_nodes = { # node_class -> (figtype, title_getter)
@ -534,81 +542,60 @@ class StandardDomain(Domain):
for node, settings in env.app.registry.enumerable_nodes.items():
self.enumerable_nodes[node] = settings

@property
def objects(self):
# type: () -> Dict[Tuple[str, str], Tuple[str, str]]
return self.data.setdefault('objects', {}) # (objtype, name) -> docname, labelid

@property
def progoptions(self):
# type: () -> Dict[Tuple[str, str], Tuple[str, str]]
return self.data.setdefault('progoptions', {}) # (program, name) -> docname, labelid

@property
def labels(self):
# type: () -> Dict[str, Tuple[str, str, str]]
return self.data.setdefault('labels', {}) # labelname -> docname, labelid, sectionname

@property
def anonlabels(self):
# type: () -> Dict[str, Tuple[str, str]]
return self.data.setdefault('anonlabels', {}) # labelname -> docname, labelid

def clear_doc(self, docname):
# type: (str) -> None
for key, (fn, _l) in list(self.data['progoptions'].items()):
key = None # type: Any
for key, (fn, _l) in list(self.progoptions.items()):
if fn == docname:
del self.data['progoptions'][key]
for key, (fn, _l) in list(self.data['objects'].items()):
del self.progoptions[key]
for key, (fn, _l) in list(self.objects.items()):
if fn == docname:
del self.data['objects'][key]
for key, (fn, _l, lineno) in list(self.data['citations'].items()):
del self.objects[key]
for key, (fn, _l, _l) in list(self.labels.items()):
if fn == docname:
del self.data['citations'][key]
for key, docnames in list(self.data['citation_refs'].items()):
if docnames == [docname]:
del self.data['citation_refs'][key]
elif docname in docnames:
docnames.remove(docname)
for key, (fn, _l, _l) in list(self.data['labels'].items()):
del self.labels[key]
for key, (fn, _l) in list(self.anonlabels.items()):
if fn == docname:
del self.data['labels'][key]
for key, (fn, _l) in list(self.data['anonlabels'].items()):
if fn == docname:
del self.data['anonlabels'][key]
del self.anonlabels[key]

def merge_domaindata(self, docnames, otherdata):
# type: (List[str], Dict) -> None
# XXX duplicates?
for key, data in otherdata['progoptions'].items():
if data[0] in docnames:
self.data['progoptions'][key] = data
self.progoptions[key] = data
for key, data in otherdata['objects'].items():
if data[0] in docnames:
self.data['objects'][key] = data
for key, data in otherdata['citations'].items():
if data[0] in docnames:
self.data['citations'][key] = data
for key, data in otherdata['citation_refs'].items():
citation_refs = self.data['citation_refs'].setdefault(key, [])
for docname in data:
if docname in docnames:
citation_refs.append(docname)
self.objects[key] = data
for key, data in otherdata['labels'].items():
if data[0] in docnames:
self.data['labels'][key] = data
self.labels[key] = data
for key, data in otherdata['anonlabels'].items():
if data[0] in docnames:
self.data['anonlabels'][key] = data
self.anonlabels[key] = data

def process_doc(self, env, docname, document):
# type: (BuildEnvironment, str, nodes.document) -> None
self.note_citations(env, docname, document)
self.note_citation_refs(env, docname, document)
self.note_labels(env, docname, document)

def note_citations(self, env, docname, document):
# type: (BuildEnvironment, str, nodes.document) -> None
for node in document.traverse(nodes.citation):
node['docname'] = docname
label = cast(nodes.label, node[0]).astext()
if label in self.data['citations']:
path = env.doc2path(self.data['citations'][label][0])
logger.warning(__('duplicate citation %s, other instance in %s'), label, path,
location=node, type='ref', subtype='citation')
self.data['citations'][label] = (docname, node['ids'][0], node.line)

def note_citation_refs(self, env, docname, document):
# type: (BuildEnvironment, str, nodes.document) -> None
for node in document.traverse(addnodes.pending_xref):
if node['refdomain'] == 'std' and node['reftype'] == 'citation':
label = node['reftarget']
citation_refs = self.data['citation_refs'].setdefault(label, [])
citation_refs.append(docname)

def note_labels(self, env, docname, document):
# type: (BuildEnvironment, str, nodes.document) -> None
labels, anonlabels = self.data['labels'], self.data['anonlabels']
for name, explicit in document.nametypes.items():
if not explicit:
continue
@ -626,11 +613,11 @@ class StandardDomain(Domain):
# ignore footnote labels, labels automatically generated from a
# link and object descriptions
continue
if name in labels:
if name in self.labels:
logger.warning(__('duplicate label %s, other instance in %s'),
name, env.doc2path(labels[name][0]),
name, env.doc2path(self.labels[name][0]),
location=node)
anonlabels[name] = docname, labelid
self.anonlabels[name] = docname, labelid
if node.tagname in ('section', 'rubric'):
title = cast(nodes.title, node[0])
sectname = clean_astext(title)
@ -647,23 +634,15 @@ class StandardDomain(Domain):
else:
# anonymous-only labels
continue
labels[name] = docname, labelid, sectname
self.labels[name] = docname, labelid, sectname

def add_object(self, objtype, name, docname, labelid):
# type: (str, str, str, str) -> None
self.data['objects'][objtype, name] = (docname, labelid)
self.objects[objtype, name] = (docname, labelid)

def add_program_option(self, program, name, docname, labelid):
# type: (str, str, str, str) -> None
self.data['progoptions'][program, name] = (docname, labelid)

def check_consistency(self):
# type: () -> None
for name, (docname, labelid, lineno) in self.data['citations'].items():
if name not in self.data['citation_refs']:
logger.warning(__('Citation [%s] is not referenced.'), name,
type='ref', subtype='citation',
location=(docname, lineno))
self.progoptions[program, name] = (docname, labelid)

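With the typed accessors above, extension code that records objects in the standard domain can go through ``add_object()`` and the ``objects`` property rather than ``self.data`` lookups. A minimal sketch (the helper name is an assumption, not part of this commit)::

    def note_envvar(env, name, docname, labelid):
        std = env.get_domain('std')
        std.add_object('envvar', name, docname, labelid)
        assert std.objects['envvar', name] == (docname, labelid)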
def build_reference_node(self, fromdocname, builder, docname, labelid,
sectname, rolename, **options):
@ -703,7 +682,10 @@ class StandardDomain(Domain):
elif typ == 'option':
resolver = self._resolve_option_xref
elif typ == 'citation':
resolver = self._resolve_citation_xref
warnings.warn('pending_xref(domain=std, type=citation) is deprecated: %r' % node,
RemovedInSphinx40Warning)
domain = env.get_domain('citation')
return domain.resolve_xref(env, fromdocname, builder, typ, target, node, contnode)
else:
resolver = self._resolve_obj_xref

@ -714,13 +696,12 @@ class StandardDomain(Domain):
if node['refexplicit']:
# reference to anonymous label; the reference uses
# the supplied link caption
docname, labelid = self.data['anonlabels'].get(target, ('', ''))
docname, labelid = self.anonlabels.get(target, ('', ''))
sectname = node.astext()
else:
# reference to named label; the final node will
# contain the section name after the label
docname, labelid, sectname = self.data['labels'].get(target,
('', '', ''))
docname, labelid, sectname = self.labels.get(target, ('', '', ''))
if not docname:
return None

@ -729,10 +710,10 @@ class StandardDomain(Domain):

def _resolve_numref_xref(self, env, fromdocname, builder, typ, target, node, contnode):
# type: (BuildEnvironment, str, Builder, str, str, addnodes.pending_xref, nodes.Element) -> nodes.Element # NOQA
if target in self.data['labels']:
docname, labelid, figname = self.data['labels'].get(target, ('', '', ''))
if target in self.labels:
docname, labelid, figname = self.labels.get(target, ('', '', ''))
else:
docname, labelid = self.data['anonlabels'].get(target, ('', ''))
docname, labelid = self.anonlabels.get(target, ('', ''))
figname = None

if not docname:
@ -791,7 +772,7 @@ class StandardDomain(Domain):
def _resolve_keyword_xref(self, env, fromdocname, builder, typ, target, node, contnode):
# type: (BuildEnvironment, str, Builder, str, str, addnodes.pending_xref, nodes.Element) -> nodes.Element # NOQA
# keywords are oddballs: they are referenced by named labels
docname, labelid, _ = self.data['labels'].get(target, ('', '', ''))
docname, labelid, _ = self.labels.get(target, ('', '', ''))
if not docname:
return None
return make_refnode(builder, fromdocname, docname,
@ -817,7 +798,7 @@ class StandardDomain(Domain):
# type: (BuildEnvironment, str, Builder, str, str, addnodes.pending_xref, nodes.Element) -> nodes.Element # NOQA
progname = node.get('std:program')
target = target.strip()
docname, labelid = self.data['progoptions'].get((progname, target), ('', ''))
docname, labelid = self.progoptions.get((progname, target), ('', ''))
if not docname:
commands = []
while ws_re.search(target):
@ -825,8 +806,7 @@ class StandardDomain(Domain):
commands.append(subcommand)
progname = "-".join(commands)

docname, labelid = self.data['progoptions'].get((progname, target),
('', ''))
docname, labelid = self.progoptions.get((progname, target), ('', ''))
if docname:
break
else:
@ -835,33 +815,12 @@ class StandardDomain(Domain):
return make_refnode(builder, fromdocname, docname,
labelid, contnode)

def _resolve_citation_xref(self, env, fromdocname, builder, typ, target, node, contnode):
# type: (BuildEnvironment, str, Builder, str, str, addnodes.pending_xref, nodes.Element) -> nodes.Element # NOQA
docname, labelid, lineno = self.data['citations'].get(target, ('', '', 0))
if not docname:
if 'ids' in node:
# remove ids attribute that annotated at
# transforms.CitationReference.apply.
del node['ids'][:]
return None

try:
return make_refnode(builder, fromdocname, docname,
labelid, contnode)
except NoUri:
# remove the ids we added in the CitationReferences
# transform since they can't be transfered to
# the contnode (if it's a Text node)
if not isinstance(contnode, nodes.Element):
del node['ids'][:]
raise

def _resolve_obj_xref(self, env, fromdocname, builder, typ, target, node, contnode):
# type: (BuildEnvironment, str, Builder, str, str, addnodes.pending_xref, nodes.Element) -> nodes.Element # NOQA
objtypes = self.objtypes_for_role(typ) or []
for objtype in objtypes:
if (objtype, target) in self.data['objects']:
docname, labelid = self.data['objects'][objtype, target]
if (objtype, target) in self.objects:
docname, labelid = self.objects[objtype, target]
break
else:
docname, labelid = '', ''
@ -885,8 +844,8 @@ class StandardDomain(Domain):
key = (objtype, target)
if objtype == 'term':
key = (objtype, ltarget)
if key in self.data['objects']:
docname, labelid = self.data['objects'][key]
if key in self.objects:
docname, labelid = self.objects[key]
results.append(('std:' + self.role_for_objtype(objtype),
make_refnode(builder, fromdocname, docname,
labelid, contnode)))
@ -897,22 +856,22 @@ class StandardDomain(Domain):
# handle the special 'doc' reference here
for doc in self.env.all_docs:
yield (doc, clean_astext(self.env.titles[doc]), 'doc', doc, '', -1)
for (prog, option), info in self.data['progoptions'].items():
for (prog, option), info in self.progoptions.items():
if prog:
fullname = ".".join([prog, option])
yield (fullname, fullname, 'cmdoption', info[0], info[1], 1)
else:
yield (option, option, 'cmdoption', info[0], info[1], 1)
for (type, name), info in self.data['objects'].items():
for (type, name), info in self.objects.items():
yield (name, name, type, info[0], info[1],
self.object_types[type].attrs['searchprio'])
for name, info in self.data['labels'].items():
yield (name, info[2], 'label', info[0], info[1], -1)
for name, (docname, labelid, sectionname) in self.labels.items():
yield (name, sectionname, 'label', docname, labelid, -1)
# add anonymous-only labels as well
non_anon_labels = set(self.data['labels'])
for name, info in self.data['anonlabels'].items():
non_anon_labels = set(self.labels)
for name, (docname, labelid) in self.anonlabels.items():
if name not in non_anon_labels:
yield (name, name, 'label', info[0], info[1], -1)
yield (name, name, 'label', docname, labelid, -1)
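``get_objects()`` now reads from the same typed properties. As a rough sketch of consuming the resulting inventory (not part of this commit; the function name is made up)::

    def dump_std_inventory(env):
        # get_objects() yields (name, dispname, objtype, docname, anchor, priority)
        for name, dispname, objtype, docname, anchor, prio in \
                env.get_domain('std').get_objects():
            print('%-10s %-30s -> %s#%s' % (objtype, name, docname, anchor))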
def get_type_name(self, type, primary=False):
# type: (ObjType, bool) -> str
@ -993,6 +952,21 @@ class StandardDomain(Domain):
else:
return None

def note_citations(self, env, docname, document):
# type: (BuildEnvironment, str, nodes.document) -> None
warnings.warn('StandardDomain.note_citations() is deprecated.',
RemovedInSphinx40Warning)

def note_citation_refs(self, env, docname, document):
# type: (BuildEnvironment, str, nodes.document) -> None
warnings.warn('StandardDomain.note_citation_refs() is deprecated.',
RemovedInSphinx40Warning)

def note_labels(self, env, docname, document):
# type: (BuildEnvironment, str, nodes.document) -> None
warnings.warn('StandardDomain.note_labels() is deprecated.',
RemovedInSphinx40Warning)


def setup(app):
# type: (Sphinx) -> Dict[str, Any]
@ -34,6 +34,7 @@ if False:
from sphinx.application import Sphinx # NOQA
from sphinx.builders import Builder # NOQA
from sphinx.config import Config # NOQA
from sphinx.events import EventManager # NOQA
from sphinx.domains import Domain # NOQA
from sphinx.project import Project # NOQA

@ -95,6 +96,7 @@ class BuildEnvironment:
self.srcdir = None # type: str
self.config = None # type: Config
self.config_status = None # type: int
self.events = None # type: EventManager
self.project = None # type: Project
self.version = None # type: Dict[str, str]

@ -190,7 +192,7 @@ class BuildEnvironment:
# type: () -> Dict
"""Obtains serializable data for pickling."""
__dict__ = self.__dict__.copy()
__dict__.update(app=None, domains={}) # clear unpickable attributes
__dict__.update(app=None, domains={}, events=None) # clear unpickable attributes
return __dict__

def __setstate__(self, state):
@ -210,6 +212,7 @@ class BuildEnvironment:

self.app = app
self.doctreedir = app.doctreedir
self.events = app.events
self.srcdir = app.srcdir
self.project = app.project
self.version = app.registry.get_envversion(app)
@ -307,7 +310,7 @@ class BuildEnvironment:

for domainname, domain in self.domains.items():
domain.merge_domaindata(docnames, other.domaindata[domainname])
app.emit('env-merge-info', self, docnames, other)
self.events.emit('env-merge-info', self, docnames, other)

def path2doc(self, filename):
# type: (str) -> Optional[str]
@ -449,7 +452,7 @@ class BuildEnvironment:
def check_dependents(self, app, already):
# type: (Sphinx, Set[str]) -> Iterator[str]
to_rewrite = [] # type: List[str]
for docnames in app.emit('env-get-updated', self):
for docnames in self.events.emit('env-get-updated', self):
to_rewrite.extend(docnames)
for docname in set(to_rewrite):
if docname not in already:
@ -597,7 +600,7 @@ class BuildEnvironment:
self.temp_data = backup

# allow custom references to be resolved
self.app.emit('doctree-resolved', doctree, docname)
self.events.emit('doctree-resolved', doctree, docname)

def collect_relations(self):
# type: () -> Dict[str, List[str]]
@ -653,4 +656,4 @@ class BuildEnvironment:
# call check-consistency for all extensions
for domain in self.domains.values():
domain.check_consistency()
self.app.emit('env-check-consistency', self)
self.events.emit('env-check-consistency', self)
@ -10,14 +10,20 @@
:license: BSD, see LICENSE for details.
"""

import warnings
from collections import OrderedDict, defaultdict

from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.errors import ExtensionError
from sphinx.locale import __
from sphinx.util import logging

if False:
# For type annotation
from typing import Any, Callable, Dict, List # NOQA
from sphinx.application import Sphinx # NOQA

logger = logging.getLogger(__name__)


# List of all known core events. Maps name to arguments description.
@ -42,20 +48,28 @@ core_events = {


class EventManager:
def __init__(self):
# type: () -> None
"""Event manager for Sphinx."""

def __init__(self, app=None):
# type: (Sphinx) -> None
if app is None:
warnings.warn('app argument is required for EventManager.',
RemovedInSphinx40Warning)
self.app = app
self.events = core_events.copy()
self.listeners = defaultdict(OrderedDict) # type: Dict[str, Dict[int, Callable]]
self.next_listener_id = 0

def add(self, name):
# type: (str) -> None
"""Register a custom Sphinx event."""
if name in self.events:
raise ExtensionError(__('Event %r already present') % name)
self.events[name] = ''

def connect(self, name, callback):
# type: (str, Callable) -> int
"""Connect a handler to specific event."""
if name not in self.events:
raise ExtensionError(__('Unknown event name: %s') % name)

@ -66,18 +80,35 @@ class EventManager:

def disconnect(self, listener_id):
# type: (int) -> None
"""Disconnect a handler."""
for event in self.listeners.values():
event.pop(listener_id, None)

def emit(self, name, *args):
# type: (str, Any) -> List
"""Emit a Sphinx event."""
try:
logger.debug('[app] emitting event: %r%s', name, repr(args)[:100])
except Exception:
# not every object likes to be repr()'d (think
# random stuff coming via autodoc)
pass

results = []
for callback in self.listeners[name].values():
if self.app is None:
# for compatibility; RemovedInSphinx40Warning
results.append(callback(*args))
else:
results.append(callback(self.app, *args))
return results

def emit_firstresult(self, name, *args):
# type: (str, Any) -> Any
"""Emit a Sphinx event and returns first result.

This returns the result of the first handler that doesn't return ``None``.
"""
for result in self.emit(name, *args):
if result is not None:
return result
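Because ``EventManager`` is now constructed with the application and ``emit()`` prepends it to the callback arguments, listeners keep their familiar ``(app, ...)`` signature. A small sketch of a listener registered through the usual extension API (assumed extension code, not part of this commit)::

    def on_build_finished(app, exception):
        if exception is None:
            print('build succeeded; output written to', app.outdir)

    def setup(app):
        app.connect('build-finished', on_build_finished)
        return {'version': '0.1', 'parallel_read_safe': True}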
@ -19,15 +19,18 @@ import glob
import locale
import os
import sys
import warnings
from fnmatch import fnmatch
from os import path

import sphinx.locale
from sphinx import __display_version__, package_dir
from sphinx.cmd.quickstart import EXTENSIONS
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.locale import __
from sphinx.util import rst
from sphinx.util.osutil import FileAvoidWrite, ensuredir
from sphinx.util.template import ReSTRenderer

if False:
# For type annotation
@ -47,6 +50,8 @@ else:
INITPY = '__init__.py'
PY_SUFFIXES = {'.py', '.pyx'}

template_dir = path.join(package_dir, 'templates', 'apidoc')


def makename(package, module):
# type: (str, str) -> str
@ -79,6 +84,8 @@ def write_file(name, text, opts):
def format_heading(level, text, escape=True):
# type: (int, str, bool) -> str
"""Create a heading of <level> [1, 2 or 3 supported]."""
warnings.warn('format_heading() is deprecated.',
RemovedInSphinx40Warning)
if escape:
text = rst.escape(text)
underlining = ['=', '-', '~', ][level - 1] * len(text)
@ -88,100 +95,79 @@ def format_heading(level, text, escape=True):
def format_directive(module, package=None):
# type: (str, str) -> str
"""Create the automodule directive and add the options."""
warnings.warn('format_directive() is deprecated.',
RemovedInSphinx40Warning)
directive = '.. automodule:: %s\n' % makename(package, module)
for option in OPTIONS:
directive += ' :%s:\n' % option
return directive


def create_module_file(package, module, opts):
def create_module_file(package, basename, opts):
# type: (str, str, Any) -> None
"""Build the text of the file and write the file."""
if not opts.noheadings:
text = format_heading(1, '%s module' % module)
else:
text = ''
# text += format_heading(2, ':mod:`%s` Module' % module)
text += format_directive(module, package)
write_file(makename(package, module), text, opts)
qualname = makename(package, basename)
context = {
'show_headings': not opts.noheadings,
'basename': basename,
'qualname': qualname,
'automodule_options': OPTIONS,
}
text = ReSTRenderer(template_dir).render('module.rst', context)
write_file(qualname, text, opts)

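The rewritten ``create_module_file()`` builds a template context and lets ``ReSTRenderer`` produce the page from ``module.rst``. Roughly, for a hypothetical module (the concrete names and option set below are made up)::

    context = {
        'show_headings': True,
        'basename': 'utils',
        'qualname': 'mypkg.utils',
        'automodule_options': {'members', 'undoc-members', 'show-inheritance'},
    }
    # ReSTRenderer(template_dir).render('module.rst', context) returns the reST
    # text that write_file() then saves as mypkg.utils.<suffix>.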
def create_package_file(root, master_package, subroot, py_files, opts, subs, is_namespace, excludes=[]): # NOQA
|
||||
# type: (str, str, str, List[str], Any, List[str], bool, List[str]) -> None
|
||||
"""Build the text of the file and write the file."""
|
||||
text = format_heading(1, ('%s package' if not is_namespace else "%s namespace")
|
||||
% makename(master_package, subroot))
|
||||
|
||||
if opts.modulefirst and not is_namespace:
|
||||
text += format_directive(subroot, master_package)
|
||||
text += '\n'
|
||||
|
||||
# build a list of directories that are subpackages (contain an INITPY file)
# and also check that the INITPY file is not empty, or that there are other
# Python source files in that folder
# (depending on settings - but shall_skip() takes care of that)
subs = [sub for sub in subs if not
shall_skip(path.join(root, sub, INITPY), opts, excludes)]
# if there are some package directories, add a TOC for these subpackages
|
||||
if subs:
|
||||
text += format_heading(2, 'Subpackages')
|
||||
text += '.. toctree::\n\n'
|
||||
for sub in subs:
|
||||
text += ' %s.%s\n' % (makename(master_package, subroot), sub)
|
||||
text += '\n'
|
||||
|
||||
submods = [path.splitext(sub)[0] for sub in py_files
|
||||
if not shall_skip(path.join(root, sub), opts, excludes) and
|
||||
sub != INITPY]
|
||||
if submods:
|
||||
text += format_heading(2, 'Submodules')
|
||||
if opts.separatemodules:
|
||||
text += '.. toctree::\n\n'
|
||||
for submod in submods:
|
||||
modfile = makename(master_package, makename(subroot, submod))
|
||||
text += ' %s\n' % modfile
|
||||
|
||||
# generate separate file for this module
|
||||
if not opts.noheadings:
|
||||
filetext = format_heading(1, '%s module' % modfile)
|
||||
else:
|
||||
filetext = ''
|
||||
filetext += format_directive(makename(subroot, submod),
|
||||
master_package)
|
||||
write_file(modfile, filetext, opts)
|
||||
else:
|
||||
for submod in submods:
|
||||
modfile = makename(master_package, makename(subroot, submod))
|
||||
if not opts.noheadings:
|
||||
text += format_heading(2, '%s module' % modfile)
|
||||
text += format_directive(makename(subroot, submod),
|
||||
master_package)
|
||||
text += '\n'
|
||||
text += '\n'
|
||||
|
||||
if not opts.modulefirst and not is_namespace:
|
||||
text += format_heading(2, 'Module contents')
|
||||
text += format_directive(subroot, master_package)
|
||||
# build a list of sub packages (directories containing an INITPY file)
|
||||
subpackages = [sub for sub in subs if not
|
||||
shall_skip(path.join(root, sub, INITPY), opts, excludes)]
|
||||
subpackages = [makename(makename(master_package, subroot), pkgname)
|
||||
for pkgname in subpackages]
|
||||
# build a list of sub modules
|
||||
submodules = [path.splitext(sub)[0] for sub in py_files
|
||||
if not shall_skip(path.join(root, sub), opts, excludes) and
|
||||
sub != INITPY]
|
||||
submodules = [makename(master_package, makename(subroot, modname))
|
||||
for modname in submodules]
|
||||
|
||||
context = {
|
||||
'pkgname': makename(master_package, subroot),
|
||||
'subpackages': subpackages,
|
||||
'submodules': submodules,
|
||||
'is_namespace': is_namespace,
|
||||
'modulefirst': opts.modulefirst,
|
||||
'separatemodules': opts.separatemodules,
|
||||
'automodule_options': OPTIONS,
|
||||
'show_headings': not opts.noheadings,
|
||||
}
|
||||
text = ReSTRenderer(template_dir).render('package.rst', context)
|
||||
write_file(makename(master_package, subroot), text, opts)
|
||||
|
||||
if submodules and opts.separatemodules:
|
||||
for submodule in submodules:
|
||||
create_module_file(None, submodule, opts)
|
||||
|
||||
|
||||
def create_modules_toc_file(modules, opts, name='modules'):
|
||||
# type: (List[str], Any, str) -> None
|
||||
"""Create the module's index."""
|
||||
text = format_heading(1, '%s' % opts.header, escape=False)
|
||||
text += '.. toctree::\n'
|
||||
text += ' :maxdepth: %s\n\n' % opts.maxdepth
|
||||
|
||||
modules.sort()
|
||||
prev_module = ''
|
||||
for module in modules:
|
||||
for module in modules[:]:
|
||||
# look if the module is a subpackage and, if yes, ignore it
|
||||
if module.startswith(prev_module + '.'):
|
||||
continue
|
||||
prev_module = module
|
||||
text += ' %s\n' % module
|
||||
modules.remove(module)
|
||||
else:
|
||||
prev_module = module
|
||||
|
||||
context = {
|
||||
'header': opts.header,
|
||||
'maxdepth': opts.maxdepth,
|
||||
'docnames': modules,
|
||||
}
|
||||
text = ReSTRenderer(template_dir).render('toc.rst', context)
|
||||
write_file(name, text, opts)
|
||||
|
||||
|
||||
|
@ -10,7 +10,6 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import re
|
||||
import warnings
|
||||
from typing import Any
|
||||
@ -23,12 +22,13 @@ from sphinx.ext.autodoc.importer import import_object, get_object_members
|
||||
from sphinx.ext.autodoc.mock import mock
|
||||
from sphinx.locale import _, __
|
||||
from sphinx.pycode import ModuleAnalyzer, PycodeError
|
||||
from sphinx.util import inspect
|
||||
from sphinx.util import logging
|
||||
from sphinx.util import rpartition
|
||||
from sphinx.util.docstrings import prepare_docstring
|
||||
from sphinx.util.inspect import Signature, isdescriptor, safe_getmembers, \
|
||||
safe_getattr, object_description, is_builtin_class_method, \
|
||||
isenumattribute, isclassmethod, isstaticmethod, isfunction, isbuiltin, ispartial, getdoc
|
||||
from sphinx.util.inspect import (
|
||||
Signature, getdoc, object_description, safe_getattr, safe_getmembers
|
||||
)
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
@ -357,7 +357,7 @@ class Documenter:
|
||||
return True
|
||||
|
||||
modname = self.get_attr(self.object, '__module__', None)
|
||||
if ispartial(self.object) and modname == '_functools': # for pypy
|
||||
if inspect.ispartial(self.object) and modname == '_functools': # for pypy
|
||||
return True
|
||||
elif modname and modname != self.modname:
|
||||
return False
|
||||
@ -403,9 +403,9 @@ class Documenter:
|
||||
|
||||
retann = self.retann
|
||||
|
||||
result = self.env.app.emit_firstresult(
|
||||
'autodoc-process-signature', self.objtype, self.fullname,
|
||||
self.object, self.options, args, retann)
|
||||
result = self.env.events.emit_firstresult('autodoc-process-signature',
|
||||
self.objtype, self.fullname,
|
||||
self.object, self.options, args, retann)
|
||||
if result:
|
||||
args, retann = result
|
||||
|
||||
@ -440,7 +440,8 @@ class Documenter:
|
||||
docstring = getdoc(self.object, self.get_attr,
|
||||
self.env.config.autodoc_inherit_docstrings)
|
||||
if docstring:
|
||||
return [prepare_docstring(docstring, ignore)]
|
||||
tab_width = self.directive.state.document.settings.tab_width
|
||||
return [prepare_docstring(docstring, ignore, tab_width)]
|
||||
return []
|
||||
|
||||
def process_doc(self, docstrings):
|
||||
@ -934,7 +935,9 @@ class DocstringSignatureMixin:
|
||||
if base not in valid_names:
|
||||
continue
|
||||
# re-prepare docstring to ignore more leading indentation
|
||||
self._new_docstrings[i] = prepare_docstring('\n'.join(doclines[1:]))
|
||||
tab_width = self.directive.state.document.settings.tab_width # type: ignore
|
||||
self._new_docstrings[i] = prepare_docstring('\n'.join(doclines[1:]),
|
||||
tabsize=tab_width)
|
||||
result = args, retann
|
||||
# don't look any further
|
||||
break
|
||||
@ -991,25 +994,27 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
|
||||
@classmethod
|
||||
def can_document_member(cls, member, membername, isattr, parent):
|
||||
# type: (Any, str, bool, Any) -> bool
|
||||
return isfunction(member) or isbuiltin(member)
|
||||
# supports functions, builtins and bound methods exported at the module level
|
||||
return (inspect.isfunction(member) or inspect.isbuiltin(member) or
|
||||
(inspect.isroutine(member) and isinstance(parent, ModuleDocumenter)))
|
||||
|
||||
def format_args(self):
|
||||
# type: () -> str
|
||||
if isbuiltin(self.object) or inspect.ismethoddescriptor(self.object):
|
||||
if inspect.isbuiltin(self.object) or inspect.ismethoddescriptor(self.object):
|
||||
# cannot introspect arguments of a C function or method
|
||||
return None
|
||||
try:
|
||||
if (not isfunction(self.object) and
|
||||
if (not inspect.isfunction(self.object) and
|
||||
not inspect.ismethod(self.object) and
|
||||
not isbuiltin(self.object) and
|
||||
not inspect.isbuiltin(self.object) and
|
||||
not inspect.isclass(self.object) and
|
||||
hasattr(self.object, '__call__')):
|
||||
args = Signature(self.object.__call__).format_args()
|
||||
else:
|
||||
args = Signature(self.object).format_args()
|
||||
except TypeError:
|
||||
if (is_builtin_class_method(self.object, '__new__') and
|
||||
is_builtin_class_method(self.object, '__init__')):
|
||||
if (inspect.is_builtin_class_method(self.object, '__new__') and
|
||||
inspect.is_builtin_class_method(self.object, '__init__')):
|
||||
raise TypeError('%r is a builtin class' % self.object)
|
||||
|
||||
# if a class should be documented as function (yay duck
|
||||
@ -1030,6 +1035,14 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
|
||||
# type: (bool) -> None
|
||||
pass
|
||||
|
||||
    def add_directive_header(self, sig):
        # type: (str) -> None
        sourcename = self.get_sourcename()
        super().add_directive_header(sig)

        if inspect.iscoroutinefunction(self.object):
            self.add_line('   :async:', sourcename)
|
||||
|
||||
|
||||
class DecoratorDocumenter(FunctionDocumenter):
|
||||
"""
|
||||
@ -1091,8 +1104,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
# classes without __init__ method, default __init__ or
|
||||
# __init__ written in C?
|
||||
if initmeth is None or \
|
||||
is_builtin_class_method(self.object, '__init__') or \
|
||||
not(inspect.ismethod(initmeth) or isfunction(initmeth)):
|
||||
inspect.is_builtin_class_method(self.object, '__init__') or \
|
||||
not(inspect.ismethod(initmeth) or inspect.isfunction(initmeth)):
|
||||
return None
|
||||
try:
|
||||
return Signature(initmeth, bound_method=True, has_retval=False).format_args()
|
||||
@ -1167,7 +1180,9 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
docstrings = [initdocstring]
|
||||
else:
|
||||
docstrings.append(initdocstring)
|
||||
return [prepare_docstring(docstring, ignore) for docstring in docstrings]
|
||||
|
||||
tab_width = self.directive.state.document.settings.tab_width
|
||||
return [prepare_docstring(docstring, ignore, tab_width) for docstring in docstrings]
|
||||
|
||||
def add_content(self, more_content, no_docstring=False):
|
||||
# type: (Any, bool) -> None
|
||||
@ -1267,6 +1282,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
|
||||
Specialized Documenter subclass for methods (normal, static and class).
|
||||
"""
|
||||
objtype = 'method'
|
||||
directivetype = 'method'
|
||||
member_order = 50
|
||||
priority = 1 # must be more than FunctionDocumenter
|
||||
|
||||
@ -1287,24 +1303,19 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
|
||||
if obj is None:
|
||||
obj = self.object
|
||||
|
||||
if isclassmethod(obj):
|
||||
self.directivetype = 'classmethod'
|
||||
if (inspect.isclassmethod(obj) or
|
||||
inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name)):
|
||||
# document class and static members before ordinary ones
|
||||
self.member_order = self.member_order - 1
|
||||
elif isstaticmethod(obj, cls=self.parent, name=self.object_name):
|
||||
self.directivetype = 'staticmethod'
|
||||
# document class and static members before ordinary ones
|
||||
self.member_order = self.member_order - 1
|
||||
else:
|
||||
self.directivetype = 'method'
|
||||
|
||||
return ret
|
||||
|
||||
def format_args(self):
|
||||
# type: () -> str
|
||||
if isbuiltin(self.object) or inspect.ismethoddescriptor(self.object):
|
||||
if inspect.isbuiltin(self.object) or inspect.ismethoddescriptor(self.object):
|
||||
# can never get arguments of a C function or method
|
||||
return None
|
||||
if isstaticmethod(self.object, cls=self.parent, name=self.object_name):
|
||||
if inspect.isstaticmethod(self.object, cls=self.parent, name=self.object_name):
|
||||
args = Signature(self.object, bound_method=False).format_args()
|
||||
else:
|
||||
args = Signature(self.object, bound_method=True).format_args()
|
||||
@ -1312,6 +1323,19 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
|
||||
args = args.replace('\\', '\\\\')
|
||||
return args
|
||||
|
||||
    def add_directive_header(self, sig):
        # type: (str) -> None
        super().add_directive_header(sig)

        sourcename = self.get_sourcename()
        obj = self.parent.__dict__.get(self.object_name, self.object)
        if inspect.iscoroutinefunction(obj):
            self.add_line('   :async:', sourcename)
        if inspect.isclassmethod(obj):
            self.add_line('   :classmethod:', sourcename)
        if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
            self.add_line('   :staticmethod:', sourcename)
|
||||
|
||||
def document_members(self, all_members=False):
|
||||
# type: (bool) -> None
|
||||
pass
|
||||
@ -1333,22 +1357,19 @@ class AttributeDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter):
|
||||
@staticmethod
|
||||
def is_function_or_method(obj):
|
||||
# type: (Any) -> bool
|
||||
return isfunction(obj) or isbuiltin(obj) or inspect.ismethod(obj)
|
||||
return inspect.isfunction(obj) or inspect.isbuiltin(obj) or inspect.ismethod(obj)
|
||||
|
||||
@classmethod
|
||||
def can_document_member(cls, member, membername, isattr, parent):
|
||||
# type: (Any, str, bool, Any) -> bool
|
||||
non_attr_types = (type, MethodDescriptorType)
|
||||
isdatadesc = isdescriptor(member) and not \
|
||||
cls.is_function_or_method(member) and not \
|
||||
isinstance(member, non_attr_types) and not \
|
||||
type(member).__name__ == "instancemethod"
|
||||
# That last condition addresses an obscure case of C-defined
|
||||
# methods using a deprecated type in Python 3, that is not otherwise
|
||||
# exported anywhere by Python
|
||||
return isdatadesc or (not isinstance(parent, ModuleDocumenter) and
|
||||
not inspect.isroutine(member) and
|
||||
not isinstance(member, type))
|
||||
if inspect.isattributedescriptor(member):
|
||||
return True
|
||||
elif (not isinstance(parent, ModuleDocumenter) and
|
||||
not inspect.isroutine(member) and
|
||||
not isinstance(member, type)):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def document_members(self, all_members=False):
|
||||
# type: (bool) -> None
|
||||
@ -1357,10 +1378,9 @@ class AttributeDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter):
|
||||
def import_object(self):
|
||||
# type: () -> Any
|
||||
ret = super().import_object()
|
||||
if isenumattribute(self.object):
|
||||
if inspect.isenumattribute(self.object):
|
||||
self.object = self.object.value
|
||||
if isdescriptor(self.object) and \
|
||||
not self.is_function_or_method(self.object):
|
||||
if inspect.isattributedescriptor(self.object):
|
||||
self._datadescriptor = True
|
||||
else:
|
||||
# if it's not a data descriptor
|
||||
@ -1398,6 +1418,37 @@ class AttributeDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter):
|
||||
super().add_content(more_content, no_docstring)
|
||||
|
||||
|
||||
class PropertyDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter):  # type: ignore
    """
    Specialized Documenter subclass for properties.
    """
    objtype = 'property'
    directivetype = 'method'
    member_order = 60

    # before AttributeDocumenter
    priority = AttributeDocumenter.priority + 1

    @classmethod
    def can_document_member(cls, member, membername, isattr, parent):
        # type: (Any, str, bool, Any) -> bool
        return inspect.isproperty(member) and isinstance(parent, ClassDocumenter)

    def document_members(self, all_members=False):
        # type: (bool) -> None
        pass

    def get_real_modname(self):
        # type: () -> str
        return self.get_attr(self.parent or self.object, '__module__', None) \
            or self.modname

    def add_directive_header(self, sig):
        # type: (str) -> None
        super().add_directive_header(sig)
        self.add_line('   :property:', self.get_sourcename())
|
||||
|
||||
|
||||
class InstanceAttributeDocumenter(AttributeDocumenter):
|
||||
"""
|
||||
Specialized Documenter subclass for attributes that cannot be imported
|
||||
@ -1456,6 +1507,7 @@ def setup(app):
|
||||
app.add_autodocumenter(DecoratorDocumenter)
|
||||
app.add_autodocumenter(MethodDocumenter)
|
||||
app.add_autodocumenter(AttributeDocumenter)
|
||||
app.add_autodocumenter(PropertyDocumenter)
|
||||
app.add_autodocumenter(InstanceAttributeDocumenter)
|
||||
|
||||
app.add_config_value('autoclass_content', 'class', True)
|
||||
|
@ -6,10 +6,14 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import warnings
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.parsers.rst.states import Struct
|
||||
from docutils.statemachine import StringList
|
||||
from docutils.utils import assemble_option_dict
|
||||
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||
from sphinx.ext.autodoc import Options, get_documenters
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.docutils import SphinxDirective, switch_source_input
|
||||
@ -17,7 +21,7 @@ from sphinx.util.nodes import nested_parse_with_titles
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Callable, Dict, List, Set, Type # NOQA
|
||||
from typing import Any, Callable, Dict, List, Set, Type # NOQA
|
||||
from docutils.parsers.rst.state import RSTState # NOQA
|
||||
from docutils.utils import Reporter # NOQA
|
||||
from sphinx.config import Config # NOQA
|
||||
@ -30,7 +34,8 @@ logger = logging.getLogger(__name__)
|
||||
# common option names for autodoc directives
|
||||
AUTODOC_DEFAULT_OPTIONS = ['members', 'undoc-members', 'inherited-members',
|
||||
'show-inheritance', 'private-members', 'special-members',
|
||||
'ignore-module-all', 'exclude-members', 'member-order']
|
||||
'ignore-module-all', 'exclude-members', 'member-order',
|
||||
'imported-members']
|
||||
|
||||
|
||||
class DummyOptionSpec(dict):
|
||||
@ -49,8 +54,8 @@ class DummyOptionSpec(dict):
|
||||
class DocumenterBridge:
|
||||
"""A parameters container for Documenters."""
|
||||
|
||||
def __init__(self, env, reporter, options, lineno):
|
||||
# type: (BuildEnvironment, Reporter, Options, int) -> None
|
||||
def __init__(self, env, reporter, options, lineno, state=None):
|
||||
# type: (BuildEnvironment, Reporter, Options, int, Any) -> None
|
||||
self.env = env
|
||||
self.reporter = reporter
|
||||
self.genopt = options
|
||||
@ -58,6 +63,16 @@ class DocumenterBridge:
|
||||
self.filename_set = set() # type: Set[str]
|
||||
self.result = StringList()
|
||||
|
||||
if state:
|
||||
self.state = state
|
||||
else:
|
||||
# create fake object for self.state.document.settings.tab_width
|
||||
warnings.warn('DocumenterBridge requires a state object on instantiation.',
|
||||
RemovedInSphinx40Warning)
|
||||
settings = Struct(tab_width=8)
|
||||
document = Struct(settings=settings)
|
||||
self.state = Struct(document=document)
|
||||
|
||||
def warn(self, msg):
|
||||
# type: (str) -> None
|
||||
logger.warning(msg, location=(self.env.docname, self.lineno))
|
||||
@ -130,7 +145,7 @@ class AutodocDirective(SphinxDirective):
|
||||
return []
|
||||
|
||||
# generate the output
|
||||
params = DocumenterBridge(self.env, reporter, documenter_options, lineno)
|
||||
params = DocumenterBridge(self.env, reporter, documenter_options, lineno, self.state)
|
||||
documenter = doccls(params, self.arguments[0])
|
||||
documenter.generate(more_content=self.content)
|
||||
if not params.result:
|
||||
|
@ -175,7 +175,7 @@ _app = None # type: Sphinx
|
||||
class FakeDirective(DocumenterBridge):
|
||||
def __init__(self):
|
||||
# type: () -> None
|
||||
super().__init__({}, None, Options(), 0) # type: ignore
|
||||
super().__init__({}, None, Options(), 0, None) # type: ignore
|
||||
|
||||
|
||||
def get_documenter(app, obj, parent):
|
||||
@ -236,7 +236,7 @@ class Autosummary(SphinxDirective):
|
||||
def run(self):
|
||||
# type: () -> List[nodes.Node]
|
||||
self.bridge = DocumenterBridge(self.env, self.state.document.reporter,
|
||||
Options(), self.lineno)
|
||||
Options(), self.lineno, self.state)
|
||||
|
||||
names = [x.strip().split()[0] for x in self.content
|
||||
if x.strip() and re.search(r'^[~a-zA-Z_]', x.strip()[0])]
|
||||
@ -734,12 +734,13 @@ def process_generate_options(app):
|
||||
return
|
||||
|
||||
depth_limit = app.config.autosummary_depth_limit
|
||||
|
||||
imported_members = app.config.autosummary_imported_members
|
||||
with mock(app.config.autosummary_mock_imports):
|
||||
generate_autosummary_docs(genfiles, builder=app.builder,
|
||||
warn=logger.warning, info=logger.info,
|
||||
suffix=suffix, base_path=app.srcdir,
|
||||
app=app, depth_limit=depth_limit)
|
||||
app=app, imported_members=imported_members,
|
||||
depth_limit=depth_limit)
|
||||
|
||||
|
||||
def setup(app):
|
||||
@ -766,4 +767,6 @@ def setup(app):
|
||||
app.add_config_value('autosummary_depth_limit', 0, 'env', [int])
|
||||
app.add_config_value('autosummary_mock_imports',
|
||||
lambda config: config.autodoc_mock_imports, 'env')
|
||||
app.add_config_value('autosummary_imported_members', [], False, [bool])
|
||||
|
||||
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
|
||||
|
@ -41,7 +41,7 @@ from sphinx.util.rst import escape as rst_escape
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Callable, Dict, List, Tuple, Type, Union # NOQA
|
||||
from typing import Any, Callable, Dict, List, Set, Tuple, Type, Union # NOQA
|
||||
from sphinx.builders import Builder # NOQA
|
||||
from sphinx.ext.autodoc import Documenter # NOQA
|
||||
|
||||
@ -198,8 +198,8 @@ def generate_autosummary_docs(sources, # type: List[str]
|
||||
except TemplateNotFound:
|
||||
template = template_env.get_template('autosummary/base.rst')
|
||||
|
||||
def get_members(obj, typ, include_public=[], imported=True):
|
||||
# type: (Any, str, List[str], bool) -> Tuple[List[str], List[str]]
|
||||
def get_members(obj, types, include_public=[], imported=True):
|
||||
# type: (Any, Set[str], List[str], bool) -> Tuple[List[str], List[str]] # NOQA
|
||||
items = [] # type: List[str]
|
||||
for name in dir(obj):
|
||||
try:
|
||||
@ -207,7 +207,7 @@ def generate_autosummary_docs(sources, # type: List[str]
|
||||
except AttributeError:
|
||||
continue
|
||||
documenter = get_documenter(app, value, obj)
|
||||
if documenter.objtype == typ:
|
||||
if documenter.objtype in types:
|
||||
if imported or getattr(value, '__module__', None) == obj.__name__:
|
||||
# skip imported members if expected
|
||||
items.append(name)
|
||||
@ -237,11 +237,11 @@ def generate_autosummary_docs(sources, # type: List[str]
|
||||
if doc.objtype == 'module':
|
||||
ns['members'] = dir(obj)
|
||||
ns['functions'], ns['all_functions'] = \
|
||||
get_members(obj, 'function', imported=imported_members)
|
||||
get_members(obj, {'function'}, imported=imported_members)
|
||||
ns['classes'], ns['all_classes'] = \
|
||||
get_members(obj, 'class', imported=imported_members)
|
||||
get_members(obj, {'class'}, imported=imported_members)
|
||||
ns['exceptions'], ns['all_exceptions'] = \
|
||||
get_members(obj, 'exception', imported=imported_members)
|
||||
get_members(obj, {'exception'}, imported=imported_members)
|
||||
if add_package_children:
|
||||
ns['modules'], ns['all_modules'] = \
|
||||
get_package_members(obj, 'module')
|
||||
@ -252,9 +252,9 @@ def generate_autosummary_docs(sources, # type: List[str]
|
||||
ns['inherited_members'] = \
|
||||
set(dir(obj)) - set(obj.__dict__.keys())
|
||||
ns['methods'], ns['all_methods'] = \
|
||||
get_members(obj, 'method', ['__init__'])
|
||||
get_members(obj, {'method'}, ['__init__'])
|
||||
ns['attributes'], ns['all_attributes'] = \
|
||||
get_members(obj, 'attribute')
|
||||
get_members(obj, {'attribute', 'property'})
|
||||
|
||||
parts = name.split('.')
|
||||
if doc.objtype in ('method', 'attribute'):
|
||||
|
@ -23,6 +23,7 @@ from docutils import nodes
|
||||
|
||||
import sphinx
|
||||
from sphinx.util.docutils import SphinxDirective
|
||||
from sphinx.util.nodes import nested_parse_with_titles
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
@ -48,8 +49,7 @@ class IfConfig(SphinxDirective):
|
||||
node.document = self.state.document
|
||||
self.set_source_info(node)
|
||||
node['expr'] = self.arguments[0]
|
||||
self.state.nested_parse(self.content, self.content_offset,
|
||||
node, match_titles=True)
|
||||
nested_parse_with_titles(self.state, self.content, node)
|
||||
return [node]
|
||||
|
||||
|
||||
|
@ -21,12 +21,15 @@ from subprocess import CalledProcessError, PIPE
|
||||
from docutils import nodes
|
||||
|
||||
import sphinx
|
||||
from sphinx import package_dir
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
|
||||
from sphinx.errors import SphinxError
|
||||
from sphinx.locale import _, __
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.math import get_node_equation_number, wrap_displaymath
|
||||
from sphinx.util.osutil import ensuredir
|
||||
from sphinx.util.png import read_png_depth, write_png_depth
|
||||
from sphinx.util.template import LaTeXRenderer
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
@ -38,6 +41,8 @@ if False:
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
templates_path = path.join(package_dir, 'templates', 'imgmath')
|
||||
|
||||
|
||||
class MathExtError(SphinxError):
|
||||
category = 'Math extension error'
|
||||
@ -85,21 +90,54 @@ DOC_BODY_PREVIEW = r'''
|
||||
'''
|
||||
|
||||
depth_re = re.compile(br'\[\d+ depth=(-?\d+)\]')
|
||||
depthsvg_re = re.compile(br'.*, depth=(.*)pt')
|
||||
depthsvgcomment_re = re.compile(r'<!-- DEPTH=(-?\d+) -->')
|
||||
|
||||
|
||||
def generate_latex_macro(math, config):
|
||||
# type: (str, Config) -> str
|
||||
def read_svg_depth(filename):
    # type: (str) -> int
    """Read the depth from the comment at the last line of an SVG file."""
    with open(filename, 'r') as f:
        for line in f:
            pass
        # only the last line is checked
        matched = depthsvgcomment_re.match(line)
        if matched:
            return int(matched.group(1))
        return None


def write_svg_depth(filename, depth):
    # type: (str, int) -> None
    """Write the depth to an SVG file as a comment at the end of the file."""
    with open(filename, 'a') as f:
        f.write('\n<!-- DEPTH=%s -->' % depth)
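
For a rough sense of the intended depth round-trip, a minimal sketch (the file name is hypothetical; the helpers are the ones defined just above)::

    from sphinx.ext.imgmath import read_svg_depth, write_svg_depth

    # append the depth marker to a rendered SVG, then read it back on a later build
    write_svg_depth('math-example.svg', -3)
    assert read_svg_depth('math-example.svg') == -3  # parsed from the trailing "<!-- DEPTH=-3 -->" comment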
|
||||
|
||||
|
||||
def generate_latex_macro(image_format, math, config, confdir=''):
|
||||
# type: (str, str, Config, str) -> str
|
||||
"""Generate LaTeX macro."""
|
||||
fontsize = config.imgmath_font_size
|
||||
baselineskip = int(round(fontsize * 1.2))
|
||||
variables = {
|
||||
'fontsize': config.imgmath_font_size,
|
||||
'baselineskip': int(round(config.imgmath_font_size * 1.2)),
|
||||
'preamble': config.imgmath_latex_preamble,
|
||||
'tightpage': '' if image_format == 'png' else ',tightpage',
|
||||
'math': math
|
||||
}
|
||||
|
||||
latex = DOC_HEAD + config.imgmath_latex_preamble
|
||||
if config.imgmath_use_preview:
|
||||
latex += DOC_BODY_PREVIEW % (fontsize, baselineskip, math)
|
||||
template_name = 'preview.tex_t'
|
||||
else:
|
||||
latex += DOC_BODY % (fontsize, baselineskip, math)
|
||||
template_name = 'template.tex_t'
|
||||
|
||||
return latex
|
||||
for template_dir in config.templates_path:
|
||||
template = path.join(confdir, template_dir, template_name)
|
||||
if path.exists(template):
|
||||
return LaTeXRenderer().render(template, variables)
|
||||
|
||||
return LaTeXRenderer(templates_path).render(template_name, variables)
|
||||
|
||||
|
||||
def ensure_tempdir(builder):
|
||||
@ -197,8 +235,18 @@ def convert_dvi_to_svg(dvipath, builder):
|
||||
command.extend(builder.config.imgmath_dvisvgm_args)
|
||||
command.append(dvipath)
|
||||
|
||||
convert_dvi_to_image(command, name)
|
||||
return filename, None
|
||||
stdout, stderr = convert_dvi_to_image(command, name)
|
||||
|
||||
depth = None
|
||||
if builder.config.imgmath_use_preview:
|
||||
for line in stderr.splitlines(): # not stdout !
|
||||
matched = depthsvg_re.match(line)
|
||||
if matched:
|
||||
depth = round(float(matched.group(1)) * 100 / 72.27) # assume 100ppi
|
||||
write_svg_depth(filename, depth)
|
||||
break
|
||||
|
||||
return filename, depth
|
||||
|
||||
|
||||
def render_math(self, math):
|
||||
@ -220,13 +268,19 @@ def render_math(self, math):
|
||||
if image_format not in SUPPORT_FORMAT:
|
||||
raise MathExtError('imgmath_image_format must be either "png" or "svg"')
|
||||
|
||||
latex = generate_latex_macro(math, self.builder.config)
|
||||
latex = generate_latex_macro(image_format,
|
||||
math,
|
||||
self.builder.config,
|
||||
self.builder.confdir)
|
||||
|
||||
filename = "%s.%s" % (sha1(latex.encode()).hexdigest(), image_format)
|
||||
relfn = posixpath.join(self.builder.imgpath, 'math', filename)
|
||||
outfn = path.join(self.builder.outdir, self.builder.imagedir, 'math', filename)
|
||||
if path.isfile(outfn):
|
||||
depth = read_png_depth(outfn)
|
||||
if image_format == 'png':
|
||||
depth = read_png_depth(outfn)
|
||||
elif image_format == 'svg':
|
||||
depth = read_svg_depth(outfn)
|
||||
return relfn, depth
|
||||
|
||||
# if latex or dvipng (dvisvgm) has failed once, don't bother to try again
|
||||
@ -332,6 +386,15 @@ def html_visit_displaymath(self, node):
|
||||
raise nodes.SkipNode
|
||||
|
||||
|
||||
deprecated_alias('sphinx.ext.imgmath',
|
||||
{
|
||||
'DOC_BODY': DOC_BODY,
|
||||
'DOC_BODY_PREVIEW': DOC_BODY_PREVIEW,
|
||||
'DOC_HEAD': DOC_HEAD,
|
||||
},
|
||||
RemovedInSphinx40Warning)
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
app.add_html_math_renderer('imgmath',
|
||||
|
@ -100,7 +100,7 @@ class GoogleDocstring:
|
||||
|
||||
"""
|
||||
|
||||
_name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
|
||||
_name_rgx = re.compile(r"^\s*((?::(?P<role>\S+):)?`(?P<name>[a-zA-Z0-9_.-]+)`|"
|
||||
r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
|
||||
|
||||
def __init__(self, docstring, config=None, app=None, what='', name='',
|
||||
@ -700,9 +700,9 @@ class GoogleDocstring:
|
||||
fields = self._consume_fields(parse_type=False, prefer_type=True)
|
||||
lines = [] # type: List[str]
|
||||
for _name, _type, _desc in fields:
|
||||
m = self._name_rgx.match(_type).groupdict()
|
||||
if m['role']:
|
||||
_type = m['name']
|
||||
m = self._name_rgx.match(_type)
|
||||
if m and m.group('name'):
|
||||
_type = m.group('name')
|
||||
_type = ' ' + _type if _type else ''
|
||||
_desc = self._strip_empty(_desc)
|
||||
_descs = ' ' + '\n '.join(_desc) if any(_desc) else ''
|
||||
|
@ -86,7 +86,7 @@ def process_todos(app, doctree):
|
||||
if not hasattr(env, 'todo_all_todos'):
|
||||
env.todo_all_todos = [] # type: ignore
|
||||
for node in doctree.traverse(todo_node):
|
||||
app.emit('todo-defined', node)
|
||||
app.events.emit('todo-defined', node)
|
||||
|
||||
newnode = node.deepcopy()
|
||||
newnode['ids'] = []
|
||||
|
@ -27,6 +27,7 @@ if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict # NOQA
|
||||
from pygments.formatter import Formatter # NOQA
|
||||
from pygments.style import Style # NOQA
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -64,16 +65,8 @@ class PygmentsBridge:
|
||||
def __init__(self, dest='html', stylename='sphinx'):
|
||||
# type: (str, str) -> None
|
||||
self.dest = dest
|
||||
if stylename is None or stylename == 'sphinx':
|
||||
style = SphinxStyle
|
||||
elif stylename == 'none':
|
||||
style = NoneStyle
|
||||
elif '.' in stylename:
|
||||
module, stylename = stylename.rsplit('.', 1)
|
||||
style = getattr(__import__(module, None, None, ['__name__']),
|
||||
stylename)
|
||||
else:
|
||||
style = get_style_by_name(stylename)
|
||||
|
||||
style = self.get_style(stylename)
|
||||
self.formatter_args = {'style': style} # type: Dict[str, Any]
|
||||
if dest == 'html':
|
||||
self.formatter = self.html_formatter
|
||||
@ -81,16 +74,25 @@ class PygmentsBridge:
|
||||
self.formatter = self.latex_formatter
|
||||
self.formatter_args['commandprefix'] = 'PYG'
|
||||
|
||||
def get_style(self, stylename):
|
||||
# type: (str) -> Style
|
||||
if stylename is None or stylename == 'sphinx':
|
||||
return SphinxStyle
|
||||
elif stylename == 'none':
|
||||
return NoneStyle
|
||||
elif '.' in stylename:
|
||||
module, stylename = stylename.rsplit('.', 1)
|
||||
return getattr(__import__(module, None, None, ['__name__']), stylename)
|
||||
else:
|
||||
return get_style_by_name(stylename)
|
||||
|
||||
def get_formatter(self, **kwargs):
|
||||
# type: (Any) -> Formatter
|
||||
kwargs.update(self.formatter_args)
|
||||
return self.formatter(**kwargs)
|
||||
|
||||
def highlight_block(self, source, lang, opts=None, location=None, force=False, **kwargs):
|
||||
# type: (str, str, Any, Any, bool, Any) -> str
|
||||
if not isinstance(source, str):
|
||||
source = source.decode()
|
||||
|
||||
def get_lexer(self, source, lang, opts=None, location=None):
|
||||
# type: (str, str, Any, Any) -> Lexer
|
||||
# find out which lexer to use
|
||||
if lang in ('py', 'python'):
|
||||
if source.startswith('>>>'):
|
||||
@ -121,6 +123,15 @@ class PygmentsBridge:
|
||||
else:
|
||||
lexer.add_filter('raiseonerror')
|
||||
|
||||
return lexer
|
||||
|
||||
def highlight_block(self, source, lang, opts=None, location=None, force=False, **kwargs):
|
||||
# type: (str, str, Any, Any, bool, Any) -> str
|
||||
if not isinstance(source, str):
|
||||
source = source.decode()
|
||||
|
||||
lexer = self.get_lexer(source, lang, opts, location)
|
||||
|
||||
# highlight via Pygments
|
||||
formatter = self.get_formatter(**kwargs)
|
||||
try:
|
||||
@ -136,6 +147,7 @@ class PygmentsBridge:
|
||||
type='misc', subtype='highlighting_failure',
|
||||
location=location)
|
||||
hlsource = highlight(source, lexers['none'], formatter)
|
||||
|
||||
if self.dest == 'html':
|
||||
return hlsource
|
||||
else:
|
||||
|
11
sphinx/io.py
11
sphinx/io.py
@ -14,6 +14,7 @@ from docutils.core import Publisher
|
||||
from docutils.io import FileInput, NullOutput
|
||||
from docutils.parsers.rst import Parser as RSTParser
|
||||
from docutils.readers import standalone
|
||||
from docutils.transforms.references import DanglingReferences
|
||||
from docutils.writers import UnfilteredWriter
|
||||
|
||||
from sphinx.transforms import (
|
||||
@ -60,7 +61,15 @@ class SphinxBaseReader(standalone.Reader):
|
||||
|
||||
    def get_transforms(self):
        # type: () -> List[Type[Transform]]
        return super().get_transforms() + self.transforms
        transforms = super().get_transforms() + self.transforms

        # remove transforms which are not needed for Sphinx
        unused = [DanglingReferences]
        for transform in unused:
            if transform in transforms:
                transforms.remove(transform)

        return transforms
|
||||
|
||||
def new_document(self):
|
||||
# type: () -> nodes.document
|
||||
|
@ -381,8 +381,17 @@ class VariableCommentPicker(ast.NodeVisitor):
|
||||
self.context.pop()
|
||||
self.current_function = None
|
||||
|
||||
def visit_AsyncFunctionDef(self, node):
|
||||
# type: (ast.AsyncFunctionDef) -> None
|
||||
"""Handles AsyncFunctionDef node and set context."""
|
||||
self.visit_FunctionDef(node) # type: ignore
|
||||
|
||||
|
||||
class DefinitionFinder(TokenProcessor):
|
||||
"""Python source code parser to detect location of functions,
|
||||
classes and methods.
|
||||
"""
|
||||
|
||||
def __init__(self, lines):
|
||||
# type: (List[str]) -> None
|
||||
super().__init__(lines)
|
||||
@ -393,6 +402,7 @@ class DefinitionFinder(TokenProcessor):
|
||||
|
||||
def add_definition(self, name, entry):
|
||||
# type: (str, Tuple[str, int, int]) -> None
|
||||
"""Add a location of definition."""
|
||||
if self.indents and self.indents[-1][0] == 'def' and entry[0] == 'def':
|
||||
# ignore definition of inner function
|
||||
pass
|
||||
@ -401,6 +411,7 @@ class DefinitionFinder(TokenProcessor):
|
||||
|
||||
def parse(self):
|
||||
# type: () -> None
|
||||
"""Parse the code to obtain location of definitions."""
|
||||
while True:
|
||||
token = self.fetch_token()
|
||||
if token is None:
|
||||
@ -422,6 +433,7 @@ class DefinitionFinder(TokenProcessor):
|
||||
|
||||
def parse_definition(self, typ):
|
||||
# type: (str) -> None
|
||||
"""Parse AST of definition."""
|
||||
name = self.fetch_token()
|
||||
self.context.append(name.value)
|
||||
funcname = '.'.join(self.context)
|
||||
@ -443,6 +455,7 @@ class DefinitionFinder(TokenProcessor):
|
||||
|
||||
def finalize_block(self):
|
||||
# type: () -> None
|
||||
"""Finalize definition block."""
|
||||
definition = self.indents.pop()
|
||||
if definition[0] != 'other':
|
||||
typ, funcname, start_pos = definition
|
||||
|
9
sphinx/templates/apidoc/module.rst
Normal file
9
sphinx/templates/apidoc/module.rst
Normal file
@ -0,0 +1,9 @@
|
||||
{%- if show_headings %}
|
||||
{{- [basename, "module"] | join(' ') | e | heading }}
|
||||
|
||||
{% endif -%}
|
||||
.. automodule:: {{ qualname }}
|
||||
{%- for option in automodule_options %}
|
||||
:{{ option }}:
|
||||
{%- endfor %}
|
||||
|
52
sphinx/templates/apidoc/package.rst
Normal file
52
sphinx/templates/apidoc/package.rst
Normal file
@ -0,0 +1,52 @@
|
||||
{%- macro automodule(modname, options) -%}
|
||||
.. automodule:: {{ modname }}
|
||||
{%- for option in options %}
|
||||
:{{ option }}:
|
||||
{%- endfor %}
|
||||
{%- endmacro %}
|
||||
|
||||
{%- macro toctree(docnames) -%}
|
||||
.. toctree::
|
||||
{% for docname in docnames %}
|
||||
{{ docname }}
|
||||
{%- endfor %}
|
||||
{%- endmacro %}
|
||||
|
||||
{%- if is_namespace %}
|
||||
{{- [pkgname, "namespace"] | join(" ") | e | heading }}
|
||||
{% else %}
|
||||
{{- [pkgname, "package"] | join(" ") | e | heading }}
|
||||
{% endif %}
|
||||
|
||||
{%- if modulefirst and not is_namespace %}
|
||||
{{ automodule(pkgname, automodule_options) }}
|
||||
{% endif %}
|
||||
|
||||
{%- if subpackages %}
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
{{ toctree(subpackages) }}
|
||||
{% endif %}
|
||||
|
||||
{%- if submodules %}
|
||||
Submodules
|
||||
----------
|
||||
{% if separatemodules %}
|
||||
{{ toctree(submodules) }}
|
||||
{%- else %}
|
||||
{%- for submodule in submodules %}
|
||||
{% if show_headings %}
|
||||
{{- [submodule, "module"] | join(" ") | e | heading(2) }}
|
||||
{% endif %}
|
||||
{{ automodule(submodule, automodule_options) }}
|
||||
{%- endfor %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
{%- if not modulefirst and not is_namespace %}
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
{{ automodule(pkgname, automodule_options) }}
|
||||
{% endif %}
|
8
sphinx/templates/apidoc/toc.rst
Normal file
8
sphinx/templates/apidoc/toc.rst
Normal file
@ -0,0 +1,8 @@
|
||||
{{ header | heading }}
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: {{ maxdepth }}
|
||||
{% for docname in docnames %}
|
||||
{{ docname }}
|
||||
{%- endfor %}
|
||||
|
18
sphinx/templates/imgmath/preview.tex_t
Normal file
18
sphinx/templates/imgmath/preview.tex_t
Normal file
@ -0,0 +1,18 @@
|
||||
\documentclass[12pt]{article}
|
||||
\usepackage[utf8]{inputenc}
|
||||
\usepackage{amsmath}
|
||||
\usepackage{amsthm}
|
||||
\usepackage{amssymb}
|
||||
\usepackage{amsfonts}
|
||||
\usepackage{anyfontsize}
|
||||
\usepackage{bm}
|
||||
\pagestyle{empty}
|
||||
<%= preamble %>
|
||||
|
||||
\usepackage[active<%= tightpage %>]{preview}
|
||||
|
||||
\begin{document}
|
||||
\begin{preview}
|
||||
\fontsize{<%= fontsize %>}{<%= baselineskip %>}\selectfont <%= math %>
|
||||
\end{preview}
|
||||
\end{document}
|
14
sphinx/templates/imgmath/template.tex_t
Normal file
14
sphinx/templates/imgmath/template.tex_t
Normal file
@ -0,0 +1,14 @@
|
||||
\documentclass[12pt]{article}
|
||||
\usepackage[utf8]{inputenc}
|
||||
\usepackage{amsmath}
|
||||
\usepackage{amsthm}
|
||||
\usepackage{amssymb}
|
||||
\usepackage{amsfonts}
|
||||
\usepackage{anyfontsize}
|
||||
\usepackage{bm}
|
||||
\pagestyle{empty}
|
||||
<%= preamble %>
|
||||
|
||||
\begin{document}
|
||||
\fontsize{<%= fontsize %>}{<%= baselineskip %>}\selectfont <%= math %>
|
||||
\end{document}
|
@ -1,5 +1,5 @@
|
||||
\begin{savenotes}\sphinxatlongtablestart\begin{longtable}
|
||||
<%- if table.align == 'center' -%>
|
||||
<%- if table.align in ('center', 'default') -%>
|
||||
[c]
|
||||
<%- elif table.align == 'left' -%>
|
||||
[l]
|
||||
|
@ -1,6 +1,6 @@
|
||||
\begin{savenotes}\sphinxattablestart
|
||||
<% if table.align -%>
|
||||
<%- if table.align == 'center' -%>
|
||||
<%- if table.align in ('center', 'default') -%>
|
||||
\centering
|
||||
<%- elif table.align == 'left' -%>
|
||||
\raggedright
|
||||
|
@ -1,6 +1,6 @@
|
||||
\begin{savenotes}\sphinxattablestart
|
||||
<% if table.align -%>
|
||||
<%- if table.align == 'center' -%>
|
||||
<%- if table.align in ('center', 'default') -%>
|
||||
\centering
|
||||
<%- elif table.align == 'left' -%>
|
||||
\raggedright
|
||||
|
@ -1,9 +1,10 @@
|
||||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = {{ rsrcdir }}
|
||||
BUILDDIR = {{ rbuilddir }}
|
||||
|
||||
@ -17,3 +18,4 @@ help:
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
|
@ -9,10 +9,10 @@
|
||||
#}
|
||||
{%- if pagename != "search" and builder != "singlehtml" %}
|
||||
<div id="searchbox" style="display: none" role="search">
|
||||
<h3>{{ _('Quick search') }}</h3>
|
||||
<h3 id="searchlabel">{{ _('Quick search') }}</h3>
|
||||
<div class="searchformwrapper">
|
||||
<form class="search" action="{{ pathto('search') }}" method="get">
|
||||
<input type="text" name="q" />
|
||||
<input type="text" name="q" aria-labelledby="searchlabel" />
|
||||
<input type="submit" value="{{ _('Go') }}" />
|
||||
</form>
|
||||
</div>
|
||||
|
@ -289,6 +289,12 @@ img.align-center, .figure.align-center, object.align-center {
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
img.align-default, .figure.align-default {
|
||||
display: block;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.align-left {
|
||||
text-align: left;
|
||||
}
|
||||
@ -297,6 +303,10 @@ img.align-center, .figure.align-center, object.align-center {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.align-default {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.align-right {
|
||||
text-align: right;
|
||||
}
|
||||
@ -368,6 +378,11 @@ table.align-center {
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table.align-default {
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table caption span.caption-number {
|
||||
font-style: italic;
|
||||
}
|
||||
|
@ -75,6 +75,16 @@ var Search = {
|
||||
}
|
||||
},
|
||||
|
||||
loadIndex : function(url) {
|
||||
$.ajax({type: "GET", url: url, data: null,
|
||||
dataType: "script", cache: true,
|
||||
complete: function(jqxhr, textstatus) {
|
||||
if (textstatus != "success") {
|
||||
document.getElementById("searchindexloader").src = url;
|
||||
}
|
||||
}});
|
||||
},
|
||||
|
||||
setIndex : function(index) {
|
||||
var q;
|
||||
this._index = index;
|
||||
|
@ -410,6 +410,20 @@ p.versionchanged span.versionmodified {
|
||||
background-color: #DCE6A0;
|
||||
}
|
||||
|
||||
dl.field-list > dt {
|
||||
color: white;
|
||||
padding-left: 0.5em;
|
||||
padding-right: 5px;
|
||||
background-color: #82A0BE;
|
||||
}
|
||||
|
||||
dl.field-list > dd {
|
||||
padding-left: 0.5em;
|
||||
margin-top: 0em;
|
||||
margin-left: 0em;
|
||||
background-color: #f7f7f7;
|
||||
}
|
||||
|
||||
/* -- table styles ---------------------------------------------------------- */
|
||||
|
||||
table.docutils {
|
||||
|
@ -9,7 +9,6 @@
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import cast
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.transforms import Transform, Transformer
|
||||
@ -19,13 +18,12 @@ from docutils.utils import normalize_language_tag
|
||||
from docutils.utils.smartquotes import smartchars
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
|
||||
from sphinx.locale import _, __
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.docutils import new_document
|
||||
from sphinx.util.i18n import format_date
|
||||
from sphinx.util.nodes import (
|
||||
NodeMatcher, apply_source_workaround, copy_source_info, is_smartquotable
|
||||
)
|
||||
from sphinx.util.nodes import NodeMatcher, apply_source_workaround, is_smartquotable
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
@ -200,39 +198,6 @@ class SortIds(SphinxTransform):
|
||||
node['ids'] = node['ids'][1:] + [node['ids'][0]]
|
||||
|
||||
|
||||
class SmartQuotesSkipper(SphinxTransform):
|
||||
"""Mark specific nodes as not smartquoted."""
|
||||
default_priority = 619
|
||||
|
||||
def apply(self, **kwargs):
|
||||
# type: (Any) -> None
|
||||
# citation labels
|
||||
for node in self.document.traverse(nodes.citation):
|
||||
label = cast(nodes.label, node[0])
|
||||
label['support_smartquotes'] = False
|
||||
|
||||
|
||||
class CitationReferences(SphinxTransform):
|
||||
"""
|
||||
Replace citation references by pending_xref nodes before the default
|
||||
docutils transform tries to resolve them.
|
||||
"""
|
||||
default_priority = 619
|
||||
|
||||
def apply(self, **kwargs):
|
||||
# type: (Any) -> None
|
||||
for node in self.document.traverse(nodes.citation_reference):
|
||||
target = node.astext()
|
||||
ref = addnodes.pending_xref(target, refdomain='std', reftype='citation',
|
||||
reftarget=target, refwarn=True,
|
||||
support_smartquotes=False,
|
||||
ids=node["ids"],
|
||||
classes=node.get('classes', []))
|
||||
ref += nodes.inline(target, '[%s]' % target)
|
||||
copy_source_info(node, ref)
|
||||
node.replace_self(ref)
|
||||
|
||||
|
||||
TRANSLATABLE_NODES = {
|
||||
'literal-block': nodes.literal_block,
|
||||
'doctest-block': nodes.doctest_block,
|
||||
@ -328,7 +293,7 @@ class FigureAligner(SphinxTransform):
|
||||
# type: (Any) -> None
|
||||
matcher = NodeMatcher(nodes.table, nodes.figure)
|
||||
for node in self.document.traverse(matcher): # type: nodes.Element
|
||||
node.setdefault('align', 'center')
|
||||
node.setdefault('align', 'default')
|
||||
|
||||
|
||||
class FilterSystemMessages(SphinxTransform):
|
||||
@ -440,12 +405,22 @@ class ManpageLink(SphinxTransform):
|
||||
node.attributes.update(info)
|
||||
|
||||
|
||||
from sphinx.domains.citation import ( # NOQA
|
||||
CitationDefinitionTransform, CitationReferenceTransform
|
||||
)
|
||||
|
||||
deprecated_alias('sphinx.transforms',
|
||||
{
|
||||
'CitationReferences': CitationReferenceTransform,
|
||||
'SmartQuotesSkipper': CitationDefinitionTransform,
|
||||
},
|
||||
RemovedInSphinx40Warning)
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
app.add_transform(ApplySourceWorkaround)
|
||||
app.add_transform(ExtraTranslatableNodes)
|
||||
app.add_transform(SmartQuotesSkipper)
|
||||
app.add_transform(CitationReferences)
|
||||
app.add_transform(DefaultSubstitutions)
|
||||
app.add_transform(MoveModuleTargets)
|
||||
app.add_transform(HandleCodeBlocks)
|
||||
|
@ -9,7 +9,7 @@
|
||||
"""
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.transforms.references import Substitutions
|
||||
from docutils.transforms.references import DanglingReferences, Substitutions
|
||||
|
||||
from sphinx.transforms import SphinxTransform
|
||||
|
||||
@ -31,6 +31,22 @@ class SubstitutionDefinitionsRemover(SphinxTransform):
|
||||
node.parent.remove(node)
|
||||
|
||||
|
||||
class SphinxDanglingReferences(DanglingReferences):
|
||||
"""DanglingReferences transform which does not output info messages."""
|
||||
|
||||
def apply(self, **kwargs):
|
||||
# type: (Any) -> None
|
||||
try:
|
||||
reporter = self.document.reporter
|
||||
report_level = reporter.report_level
|
||||
|
||||
# suppress INFO level messages for a while
|
||||
reporter.report_level = max(reporter.WARNING_LEVEL, reporter.report_level)
|
||||
super().apply()
|
||||
finally:
|
||||
reporter.report_level = report_level
|
||||
|
||||
|
||||
class SphinxDomains(SphinxTransform):
|
||||
"""Collect objects to Sphinx domains for cross references."""
|
||||
default_priority = 850
|
||||
@ -44,6 +60,7 @@ class SphinxDomains(SphinxTransform):
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
app.add_transform(SubstitutionDefinitionsRemover)
|
||||
app.add_transform(SphinxDanglingReferences)
|
||||
app.add_transform(SphinxDomains)
|
||||
|
||||
return {
|
||||
|
@ -15,8 +15,8 @@ if False:
|
||||
from typing import List # NOQA
|
||||
|
||||
|
||||
def prepare_docstring(s, ignore=1):
|
||||
# type: (str, int) -> List[str]
|
||||
def prepare_docstring(s, ignore=1, tabsize=8):
|
||||
# type: (str, int, int) -> List[str]
|
||||
"""Convert a docstring into lines of parseable reST. Remove common leading
|
||||
indentation, where the indentation of a given number of lines (usually just
|
||||
one) is ignored.
|
||||
@ -25,7 +25,7 @@ def prepare_docstring(s, ignore=1):
|
||||
ViewList (used as argument of nested_parse().) An empty line is added to
|
||||
act as a separator between this docstring and following content.
|
||||
"""
|
||||
lines = s.expandtabs().splitlines()
|
||||
lines = s.expandtabs(tabsize).splitlines()
|
||||
# Find minimum indentation of any non-blank lines after ignored lines.
|
||||
margin = sys.maxsize
|
||||
for line in lines[ignore:]:
|
||||
|
@ -14,7 +14,10 @@ import inspect
|
||||
import re
|
||||
import sys
|
||||
import typing
|
||||
from functools import partial
|
||||
from functools import partial, partialmethod
|
||||
from inspect import ( # NOQA
|
||||
isclass, ismethod, ismethoddescriptor, isroutine
|
||||
)
|
||||
from io import StringIO
|
||||
|
||||
from sphinx.util import logging
|
||||
@ -24,6 +27,17 @@ if False:
|
||||
# For type annotation
|
||||
from typing import Any, Callable, Mapping, List, Tuple, Type # NOQA
|
||||
|
||||
if sys.version_info > (3, 7):
|
||||
from types import (
|
||||
ClassMethodDescriptorType,
|
||||
MethodDescriptorType,
|
||||
WrapperDescriptorType
|
||||
)
|
||||
else:
|
||||
ClassMethodDescriptorType = type(object.__init__)
|
||||
MethodDescriptorType = type(str.join)
|
||||
WrapperDescriptorType = type(dict.__dict__['fromkeys'])
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
memory_address_re = re.compile(r' at 0x[0-9a-f]{8,16}(?=>)', re.IGNORECASE)
|
||||
@ -113,7 +127,7 @@ def isenumattribute(x):
|
||||
def ispartial(obj):
|
||||
# type: (Any) -> bool
|
||||
"""Check if the object is partial."""
|
||||
return isinstance(obj, partial)
|
||||
return isinstance(obj, (partial, partialmethod))
|
||||
|
||||
|
||||
def isclassmethod(obj):
|
||||
@ -156,6 +170,34 @@ def isdescriptor(x):
|
||||
return False
|
||||
|
||||
|
||||
def isattributedescriptor(obj):
    # type: (Any) -> bool
    """Check if the object is an attribute-like descriptor."""
    if inspect.isdatadescriptor(obj):
        # data descriptor is kind of attribute
        return True
    elif isdescriptor(obj):
        # non data descriptor
        if isfunction(obj) or isbuiltin(obj) or inspect.ismethod(obj):
            # attribute must not be a function, builtin or method
            return False
        elif inspect.isclass(obj):
            # attribute must not be a class
            return False
        elif isinstance(obj, (ClassMethodDescriptorType,
                              MethodDescriptorType,
                              WrapperDescriptorType)):
            # attribute must not be a method descriptor
            return False
        elif type(obj).__name__ == "instancemethod":
            # attribute must not be an instancemethod (C-API)
            return False
        else:
            return True
    else:
        return False
|
||||
|
||||
|
||||
def isfunction(obj):
|
||||
# type: (Any) -> bool
|
||||
"""Check if the object is function."""
|
||||
@ -168,6 +210,24 @@ def isbuiltin(obj):
|
||||
return inspect.isbuiltin(obj) or ispartial(obj) and inspect.isbuiltin(obj.func)
|
||||
|
||||
|
||||
def iscoroutinefunction(obj):
    # type: (Any) -> bool
    """Check if the object is a coroutine function."""
    if inspect.iscoroutinefunction(obj):
        return True
    elif ispartial(obj) and inspect.iscoroutinefunction(obj.func):
        # partialed
        return True
    else:
        return False


def isproperty(obj):
    # type: (Any) -> bool
    """Check if the object is a property."""
    return isinstance(obj, property)
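
To illustrate how the extended helpers behave, a minimal sketch (the sample function and class are hypothetical; ``inspect`` here is ``sphinx.util.inspect``)::

    from functools import partial, partialmethod

    from sphinx.util import inspect


    async def fetch():          # hypothetical coroutine function
        pass


    class Client:               # hypothetical class using partialmethod
        def request(self, method):
            pass
        get = partialmethod(request, 'GET')


    assert inspect.iscoroutinefunction(partial(fetch))    # partial-wrapped coroutines are detected
    assert inspect.ispartial(Client.__dict__['get'])      # partialmethod now counts as partial
    assert inspect.isproperty(property(lambda self: 1))   # plain property check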
|
||||
|
||||
|
||||
def safe_getattr(obj, name, *defargs):
|
||||
# type: (Any, str, str) -> object
|
||||
"""A getattr() that turns all exceptions into AttributeErrors."""
|
||||
|
@ -20,6 +20,7 @@ from io import StringIO
|
||||
from os import path
|
||||
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||
from sphinx.testing.path import path as Path
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
@ -167,15 +168,18 @@ fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
|
||||
|
||||
def abspath(pathdir):
|
||||
# type: (str) -> str
|
||||
pathdir = path.abspath(pathdir)
|
||||
if isinstance(pathdir, bytes):
|
||||
try:
|
||||
pathdir = pathdir.decode(fs_encoding)
|
||||
except UnicodeDecodeError:
|
||||
raise UnicodeDecodeError('multibyte filename not supported on '
|
||||
'this filesystem encoding '
|
||||
'(%r)' % fs_encoding)
|
||||
return pathdir
|
||||
if isinstance(pathdir, Path):
|
||||
return pathdir.abspath()
|
||||
else:
|
||||
pathdir = path.abspath(pathdir)
|
||||
if isinstance(pathdir, bytes):
|
||||
try:
|
||||
pathdir = pathdir.decode(fs_encoding)
|
||||
except UnicodeDecodeError:
|
||||
raise UnicodeDecodeError('multibyte filename not supported on '
|
||||
'this filesystem encoding '
|
||||
'(%r)' % fs_encoding)
|
||||
return pathdir
|
||||
|
||||
|
||||
def getcwd():
|
||||
|
@ -9,11 +9,14 @@
|
||||
"""
|
||||
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from contextlib import contextmanager
|
||||
from unicodedata import east_asian_width
|
||||
|
||||
from docutils.parsers.rst import roles
|
||||
from docutils.parsers.rst.languages import en as english
|
||||
from docutils.utils import Reporter
|
||||
from jinja2 import environmentfilter
|
||||
|
||||
from sphinx.locale import __
|
||||
from sphinx.util import docutils
|
||||
@ -21,13 +24,20 @@ from sphinx.util import logging
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Generator # NOQA
|
||||
from typing import Callable, Dict, Generator # NOQA
|
||||
from docutils.statemachine import StringList # NOQA
|
||||
from jinja2 import Environment # NOQA
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
docinfo_re = re.compile(':\\w+:.*?')
|
||||
symbols_re = re.compile(r'([!-\-/:-@\[-`{-~])') # symbols without dot(0x2e)
|
||||
SECTIONING_CHARS = ['=', '-', '~']
|
||||
|
||||
# width of characters
|
||||
WIDECHARS = defaultdict(lambda: "WF") # type: Dict[str, str]
|
||||
# WF: Wide + Full-width
|
||||
WIDECHARS["ja"] = "WFA" # In Japanese, Ambiguous characters also have double width
|
||||
|
||||
|
||||
def escape(text):
|
||||
@ -37,6 +47,29 @@ def escape(text):
|
||||
return text
|
||||
|
||||
|
||||
def textwidth(text, widechars='WF'):
    # type: (str, str) -> int
    """Get width of text."""
    def charwidth(char, widechars):
        # type: (str, str) -> int
        if east_asian_width(char) in widechars:
            return 2
        else:
            return 1

    return sum(charwidth(c, widechars) for c in text)


@environmentfilter
def heading(env, text, level=1):
    # type: (Environment, str, int) -> str
    """Create a heading for *level*."""
    assert level <= 3
    width = textwidth(text, WIDECHARS[env.language])  # type: ignore
    sectioning_char = SECTIONING_CHARS[level - 1]
    return '%s\n%s' % (text, sectioning_char * width)
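
As a quick illustration, the new ``heading`` filter can be exercised on a throwaway Jinja environment (a sketch for illustration only; ``ReSTRenderer`` wires this up for real use)::

    from jinja2 import Environment

    from sphinx.util import rst

    env = Environment()
    env.extend(language='en')            # attribute read by the filter via WIDECHARS[env.language]
    env.filters['heading'] = rst.heading

    print(env.from_string("{{ 'sphinx.ext.autosummary' | heading(2) }}").render())
    # sphinx.ext.autosummary
    # ----------------------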
|
||||
|
||||
|
||||
@contextmanager
|
||||
def default_role(docname, name):
|
||||
# type: (str, str) -> Generator
|
||||
|
@ -15,7 +15,7 @@ from jinja2.sandbox import SandboxedEnvironment
from sphinx import package_dir
from sphinx.jinja2glue import SphinxFileSystemLoader
from sphinx.locale import get_translator
from sphinx.util import texescape
from sphinx.util import rst, texescape

if False:
    # For type annotation
@ -67,9 +67,10 @@ class SphinxRenderer(FileRenderer):


class LaTeXRenderer(SphinxRenderer):
    def __init__(self):
        # type: () -> None
        template_path = os.path.join(package_dir, 'templates', 'latex')
    def __init__(self, template_path=None):
        # type: (str) -> None
        if template_path is None:
            template_path = os.path.join(package_dir, 'templates', 'latex')
        super().__init__(template_path)

        # use texescape as escape filter
@ -83,3 +84,17 @@ class LaTeXRenderer(SphinxRenderer):
        self.env.variable_end_string = '%>'
        self.env.block_start_string = '<%'
        self.env.block_end_string = '%>'


class ReSTRenderer(SphinxRenderer):
    def __init__(self, template_path=None, language=None):
        # type: (str, str) -> None
        super().__init__(template_path)

        # add language to environment
        self.env.extend(language=language)

        # use texescape as escape filter
        self.env.filters['e'] = rst.escape
        self.env.filters['escape'] = rst.escape
        self.env.filters['heading'] = rst.heading
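A rough usage sketch for the new ReSTRenderer. It assumes the render_string() helper inherited from the base renderer and that a template_path of None falls back to Sphinx's bundled templates; the template string and context are made up for illustration:

# Sketch: rendering a one-line reST template with the new ReSTRenderer.
from sphinx.util.template import ReSTRenderer

renderer = ReSTRenderer(language='ja')
source = '{{ title | e | heading }}'
print(renderer.render_string(source, {'title': 'モジュール一覧'}))
# prints the escaped title followed by an '=' underline of matching display width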
@ -67,6 +67,7 @@ class HTML5Translator(SphinxTranslator, BaseTranslator):
        self.param_separator = ''
        self.optional_param_level = 0
        self._table_row_index = 0
        self._fieldlist_row_index = 0
        self.required_params_left = 0

    def visit_start_of_file(self, node):
@ -1527,6 +1527,7 @@ class LaTeXTranslator(SphinxTranslator):
        (1, 'middle'): ('\\raisebox{-0.5\\height}{', '}'),
        (1, 'bottom'): ('\\raisebox{-\\height}{', '}'),
        (0, 'center'): ('{\\hspace*{\\fill}', '\\hspace*{\\fill}}'),
        (0, 'default'): ('{\\hspace*{\\fill}', '\\hspace*{\\fill}}'),
        # These 2 don't exactly do the right thing.  The image should
        # be floated alongside the paragraph.  See
        # https://www.w3.org/TR/html4/struct/objects.html#adef-align-IMG
@ -282,7 +282,7 @@ class ManualPageTranslator(SphinxTranslator, BaseTranslator):

    def depart_rubric(self, node):
        # type: (nodes.Element) -> None
        pass
        self.body.append('\n')

    def visit_seealso(self, node):
        # type: (nodes.Element) -> None
7  tests/roots/test-ext-autodoc/target/bound_method.py  Normal file
@ -0,0 +1,7 @@
class Cls:
    def method(self):
        """Method docstring"""
        pass


bound_method = Cls().method

15  tests/roots/test-ext-autodoc/target/functions.py  Normal file
@ -0,0 +1,15 @@
from functools import partial


def func():
    pass


async def coroutinefunc():
    pass

partial_func = partial(func)
partial_coroutinefunc = partial(coroutinefunc)

builtin_func = print
partial_builtin_func = partial(print)

29  tests/roots/test-ext-autodoc/target/methods.py  Normal file
@ -0,0 +1,29 @@
from functools import partialmethod


class Base():
    def meth(self):
        pass

    @staticmethod
    def staticmeth():
        pass

    @classmethod
    def classmeth(cls):
        pass

    @property
    def prop(self):
        pass

    partialmeth = partialmethod(meth)

    async def coroutinemeth(self):
        pass

    partial_coroutinemeth = partialmethod(coroutinemeth)


class Inherited(Base):
    pass
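The bound_method.py root above exists because a bound method exported at module level is documented as a function even though it is not a plain function object; a small illustration of the distinction the autodoc changes have to cope with (assuming the test root directory is importable):

# Illustration: a bound method exported as a module-level name.
import inspect
from target.bound_method import bound_method  # the new test module above

print(inspect.ismethod(bound_method))    # True, still bound to a Cls() instance
print(inspect.isfunction(bound_method))  # False, not a plain function object
print(bound_method.__doc__)              # 'Method docstring'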
@ -0,0 +1 @@
from .autosummary_dummy_module import Bar, foo
@ -0,0 +1,8 @@
class Bar:
    """Bar class"""
    pass


def foo():
    """Foo function"""
    pass
@ -0,0 +1,7 @@
import os
import sys
sys.path.insert(0, os.path.abspath('.'))

extensions = ['sphinx.ext.autosummary']
autosummary_generate = True
autosummary_imported_members = True
@ -0,0 +1,7 @@
test-ext-autosummary-mock_imports
=================================

.. autosummary::
   :toctree: generated

   autosummary_dummy_package
@ -11,6 +11,7 @@

import platform
import sys
from unittest.mock import Mock
from warnings import catch_warnings

import pytest
@ -33,7 +34,9 @@ def do_autodoc(app, objtype, name, options=None):
    app.env.temp_data.setdefault('docname', 'index')  # set dummy docname
    doccls = app.registry.documenters[objtype]
    docoptions = process_documenter_options(doccls, app.config, options)
    bridge = DocumenterBridge(app.env, LoggingReporter(''), docoptions, 1)
    state = Mock()
    state.document.settings.tab_width = 8
    bridge = DocumenterBridge(app.env, LoggingReporter(''), docoptions, 1, state)
    documenter = doccls(bridge, name)
    documenter.generate()

@ -92,7 +95,9 @@ def setup_test():
        genopt = options,
        result = ViewList(),
        filename_set = set(),
        state = Mock(),
    )
    directive.state.document.settings.tab_width = 8

    processed_docstrings = []
    processed_signatures = []
@ -256,6 +261,11 @@ def test_format_signature():
|
||||
assert formatsig('method', 'H.foo', H.foo2, None, None) == '(*c)'
|
||||
assert formatsig('method', 'H.foo', H.foo3, None, None) == r"(d='\\n')"
|
||||
|
||||
# test bound methods interpreted as functions
|
||||
assert formatsig('function', 'foo', H().foo1, None, None) == '(b, *c)'
|
||||
assert formatsig('function', 'foo', H().foo2, None, None) == '(*c)'
|
||||
assert formatsig('function', 'foo', H().foo3, None, None) == r"(d='\\n')"
|
||||
|
||||
# test exception handling (exception is caught and args is '')
|
||||
directive.env.config.autodoc_docstring_signature = False
|
||||
assert formatsig('function', 'int', int, None, None) == ''
|
||||
@ -448,6 +458,14 @@ def test_get_doc():
|
||||
directive.env.config.autoclass_content = 'both'
|
||||
assert getdocl('class', I) == ['Class docstring', '', 'New docstring']
|
||||
|
||||
# verify that method docstrings get extracted in both normal case
|
||||
# and in case of bound method posing as a function
|
||||
class J: # NOQA
|
||||
def foo(self):
|
||||
"""Method docstring"""
|
||||
assert getdocl('method', J.foo) == ['Method docstring']
|
||||
assert getdocl('function', J().foo) == ['Method docstring']
|
||||
|
||||
from target import Base, Derived
|
||||
|
||||
# NOTE: inspect.getdoc seems not to work with locally defined classes
|
||||
@ -689,9 +707,9 @@ def test_autodoc_members(app):
|
||||
actual = do_autodoc(app, 'class', 'target.Base', options)
|
||||
assert list(filter(lambda l: '::' in l, actual)) == [
|
||||
'.. py:class:: Base',
|
||||
' .. py:classmethod:: Base.inheritedclassmeth()',
|
||||
' .. py:method:: Base.inheritedclassmeth()',
|
||||
' .. py:method:: Base.inheritedmeth()',
|
||||
' .. py:staticmethod:: Base.inheritedstaticmeth(cls)'
|
||||
' .. py:method:: Base.inheritedstaticmeth(cls)'
|
||||
]
|
||||
|
||||
# default specific-members
|
||||
@ -700,7 +718,7 @@ def test_autodoc_members(app):
|
||||
assert list(filter(lambda l: '::' in l, actual)) == [
|
||||
'.. py:class:: Base',
|
||||
' .. py:method:: Base.inheritedmeth()',
|
||||
' .. py:staticmethod:: Base.inheritedstaticmeth(cls)'
|
||||
' .. py:method:: Base.inheritedstaticmeth(cls)'
|
||||
]
|
||||
|
||||
|
||||
@ -711,7 +729,7 @@ def test_autodoc_exclude_members(app):
|
||||
actual = do_autodoc(app, 'class', 'target.Base', options)
|
||||
assert list(filter(lambda l: '::' in l, actual)) == [
|
||||
'.. py:class:: Base',
|
||||
' .. py:classmethod:: Base.inheritedclassmeth()'
|
||||
' .. py:method:: Base.inheritedclassmeth()'
|
||||
]
|
||||
|
||||
# members vs exclude-members
|
||||
@ -739,9 +757,9 @@ def test_autodoc_undoc_members(app):
|
||||
' .. py:attribute:: Class.inst_attr_string',
|
||||
' .. py:attribute:: Class.mdocattr',
|
||||
' .. py:method:: Class.meth()',
|
||||
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:attribute:: Class.prop',
|
||||
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:method:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:method:: Class.prop',
|
||||
' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:attribute:: Class.skipattr',
|
||||
' .. py:method:: Class.skipmeth()',
|
||||
' .. py:attribute:: Class.udocattr',
|
||||
@ -756,11 +774,12 @@ def test_autodoc_inherited_members(app):
|
||||
actual = do_autodoc(app, 'class', 'target.Class', options)
|
||||
assert list(filter(lambda l: 'method::' in l, actual)) == [
|
||||
' .. py:method:: Class.excludemeth()',
|
||||
' .. py:classmethod:: Class.inheritedclassmeth()',
|
||||
' .. py:method:: Class.inheritedclassmeth()',
|
||||
' .. py:method:: Class.inheritedmeth()',
|
||||
' .. py:staticmethod:: Class.inheritedstaticmeth(cls)',
|
||||
' .. py:method:: Class.inheritedstaticmeth(cls)',
|
||||
' .. py:method:: Class.meth()',
|
||||
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:method:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:method:: Class.prop',
|
||||
' .. py:method:: Class.skipmeth()'
|
||||
]
|
||||
|
||||
@ -819,9 +838,9 @@ def test_autodoc_special_members(app):
|
||||
' .. py:attribute:: Class.inst_attr_string',
|
||||
' .. py:attribute:: Class.mdocattr',
|
||||
' .. py:method:: Class.meth()',
|
||||
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:attribute:: Class.prop',
|
||||
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:method:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:method:: Class.prop',
|
||||
' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:attribute:: Class.skipattr',
|
||||
' .. py:method:: Class.skipmeth()',
|
||||
' .. py:attribute:: Class.udocattr',
|
||||
@ -939,6 +958,34 @@ def test_autodoc_inner_class(app):
    ]


@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_classmethod(app):
    actual = do_autodoc(app, 'method', 'target.Base.inheritedclassmeth')
    assert list(actual) == [
        '',
        '.. py:method:: Base.inheritedclassmeth()',
        '   :module: target',
        '   :classmethod:',
        '',
        '   Inherited class method.',
        '   '
    ]


@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_staticmethod(app):
    actual = do_autodoc(app, 'method', 'target.Base.inheritedstaticmeth')
    assert list(actual) == [
        '',
        '.. py:method:: Base.inheritedstaticmeth(cls)',
        '   :module: target',
        '   :staticmethod:',
        '',
        '   Inherited static method.',
        '   '
    ]


@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_descriptor(app):
    actual = do_autodoc(app, 'attribute', 'target.Class.descr')
@ -984,12 +1031,12 @@ def test_autodoc_member_order(app):
|
||||
' .. py:method:: Class.excludemeth()',
|
||||
' .. py:attribute:: Class.skipattr',
|
||||
' .. py:attribute:: Class.attr',
|
||||
' .. py:attribute:: Class.prop',
|
||||
' .. py:method:: Class.prop',
|
||||
' .. py:attribute:: Class.docattr',
|
||||
' .. py:attribute:: Class.udocattr',
|
||||
' .. py:attribute:: Class.mdocattr',
|
||||
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:method:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:attribute:: Class.inst_attr_inline',
|
||||
' .. py:attribute:: Class.inst_attr_comment',
|
||||
' .. py:attribute:: Class.inst_attr_string',
|
||||
@ -1006,8 +1053,8 @@ def test_autodoc_member_order(app):
|
||||
'.. py:class:: Class(arg)',
|
||||
' .. py:method:: Class.excludemeth()',
|
||||
' .. py:method:: Class.meth()',
|
||||
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:method:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:method:: Class.skipmeth()',
|
||||
' .. py:method:: Class.undocmeth()',
|
||||
' .. py:attribute:: Class._private_inst_attr',
|
||||
@ -1018,7 +1065,7 @@ def test_autodoc_member_order(app):
|
||||
' .. py:attribute:: Class.inst_attr_inline',
|
||||
' .. py:attribute:: Class.inst_attr_string',
|
||||
' .. py:attribute:: Class.mdocattr',
|
||||
' .. py:attribute:: Class.prop',
|
||||
' .. py:method:: Class.prop',
|
||||
' .. py:attribute:: Class.skipattr',
|
||||
' .. py:attribute:: Class.udocattr'
|
||||
]
|
||||
@ -1040,9 +1087,9 @@ def test_autodoc_member_order(app):
|
||||
' .. py:attribute:: Class.inst_attr_string',
|
||||
' .. py:attribute:: Class.mdocattr',
|
||||
' .. py:method:: Class.meth()',
|
||||
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:attribute:: Class.prop',
|
||||
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:method:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:method:: Class.prop',
|
||||
' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
|
||||
' .. py:attribute:: Class.skipattr',
|
||||
' .. py:method:: Class.skipmeth()',
|
||||
' .. py:attribute:: Class.udocattr',
|
||||
@ -1108,14 +1155,16 @@ def test_autodoc_docstring_signature(app):
|
||||
' indented line',
|
||||
' ',
|
||||
' ',
|
||||
' .. py:attribute:: DocstringSig.prop1',
|
||||
' .. py:method:: DocstringSig.prop1',
|
||||
' :module: target',
|
||||
' :property:',
|
||||
' ',
|
||||
' First line of docstring',
|
||||
' ',
|
||||
' ',
|
||||
' .. py:attribute:: DocstringSig.prop2',
|
||||
' .. py:method:: DocstringSig.prop2',
|
||||
' :module: target',
|
||||
' :property:',
|
||||
' ',
|
||||
' First line of docstring',
|
||||
' Second line of docstring',
|
||||
@ -1150,15 +1199,17 @@ def test_autodoc_docstring_signature(app):
|
||||
' indented line',
|
||||
' ',
|
||||
' ',
|
||||
' .. py:attribute:: DocstringSig.prop1',
|
||||
' .. py:method:: DocstringSig.prop1',
|
||||
' :module: target',
|
||||
' :property:',
|
||||
' ',
|
||||
' DocstringSig.prop1(self)',
|
||||
' First line of docstring',
|
||||
' ',
|
||||
' ',
|
||||
' .. py:attribute:: DocstringSig.prop2',
|
||||
' .. py:method:: DocstringSig.prop2',
|
||||
' :module: target',
|
||||
' :property:',
|
||||
' ',
|
||||
' First line of docstring',
|
||||
' Second line of docstring',
|
||||
@ -1460,8 +1511,34 @@ def test_partialfunction():
    ]


@pytest.mark.usefixtures('setup_test')
def test_bound_method():
    options = {"members": None}
    actual = do_autodoc(app, 'module', 'target.bound_method', options)
    assert list(actual) == [
        '',
        '.. py:module:: target.bound_method',
        '',
        '',
        '.. py:function:: bound_method()',
        '   :module: target.bound_method',
        '',
        '   Method docstring',
        '   ',
    ]


@pytest.mark.usefixtures('setup_test')
def test_coroutine():
    actual = do_autodoc(app, 'function', 'target.functions.coroutinefunc')
    assert list(actual) == [
        '',
        '.. py:function:: coroutinefunc()',
        '   :module: target.functions',
        '   :async:',
        '',
    ]

    options = {"members": None}
    actual = do_autodoc(app, 'class', 'target.coroutine.AsyncClass', options)
    assert list(actual) == [
@ -1472,6 +1549,7 @@ def test_coroutine():
        '   ',
        '   .. py:method:: AsyncClass.do_coroutine()',
        '      :module: target.coroutine',
        '      :async:',
        '   ',
        '      A documented coroutine function',
        '   '
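For context on what these assertions exercise: any module-level async def is now rendered with the :async: option, which the py domain shows as an async prefix. A self-contained illustration, not taken from the test roots above:

# Illustration only: the kind of object the new coroutine support targets.
import inspect

async def fetch_data():
    """A documented coroutine function."""

print(inspect.iscoroutinefunction(fetch_data))    # True: documented with :async:
print(inspect.iscoroutinefunction(lambda: None))  # False: a plain py:function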
@ -1527,6 +1605,8 @@ def test_autodoc_default_options(app):
    assert '   .. py:attribute:: EnumCls.val4' not in actual
    actual = do_autodoc(app, 'class', 'target.CustomIter')
    assert '   .. py:method:: target.CustomIter' not in actual
    actual = do_autodoc(app, 'module', 'target')
    assert '.. py:function:: save_traceback(app)' not in actual

    # with :members:
    app.config.autodoc_default_options = {'members': None}
@ -1590,6 +1670,15 @@ def test_autodoc_default_options(app):
    assert '   .. py:method:: CustomIter.snafucate()' in actual
    assert '   Makes this snafucated.' in actual

    # with :imported-members:
    app.config.autodoc_default_options = {
        'members': None,
        'imported-members': None,
        'ignore-module-all': None,
    }
    actual = do_autodoc(app, 'module', 'target')
    assert '.. py:function:: save_traceback(app)' in actual


@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_default_options_with_values(app):
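End users reach the same behaviour through conf.py rather than app.config; a minimal sketch whose option keys mirror the dictionary set in the :imported-members: block above:

# conf.py sketch: document members, including ones imported into the module.
extensions = ['sphinx.ext.autodoc']

autodoc_default_options = {
    'members': None,            # None means "enabled with no argument"
    'imported-members': None,   # the new option exercised by the test above
}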
@ -1614,11 +1703,11 @@ def test_autodoc_default_options_with_values(app):
|
||||
' .. py:method:: Class.skipmeth()',
|
||||
' .. py:method:: Class.excludemeth()',
|
||||
' .. py:attribute:: Class.attr',
|
||||
' .. py:attribute:: Class.prop',
|
||||
' .. py:method:: Class.prop',
|
||||
' .. py:attribute:: Class.docattr',
|
||||
' .. py:attribute:: Class.udocattr',
|
||||
' .. py:attribute:: Class.mdocattr',
|
||||
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:method:: Class.moore(a, e, f) -> happiness',
|
||||
' .. py:attribute:: Class.inst_attr_inline',
|
||||
' .. py:attribute:: Class.inst_attr_comment',
|
||||
' .. py:attribute:: Class.inst_attr_string',
|
||||
|
@ -565,7 +565,7 @@ def test_numfig_disabled_warn(app, warning):
|
||||
|
||||
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||
'index.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", None, True),
|
||||
(".//table/caption/span[@class='caption-number']", None, True),
|
||||
(".//div[@class='code-block-caption']/"
|
||||
@ -582,21 +582,21 @@ def test_numfig_disabled_warn(app, warning):
|
||||
(".//li/p/a/span", '^Sect.1 Foo$', True),
|
||||
],
|
||||
'foo.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", None, True),
|
||||
(".//table/caption/span[@class='caption-number']", None, True),
|
||||
(".//div[@class='code-block-caption']/"
|
||||
"span[@class='caption-number']", None, True),
|
||||
],
|
||||
'bar.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", None, True),
|
||||
(".//table/caption/span[@class='caption-number']", None, True),
|
||||
(".//div[@class='code-block-caption']/"
|
||||
"span[@class='caption-number']", None, True),
|
||||
],
|
||||
'baz.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", None, True),
|
||||
(".//table/caption/span[@class='caption-number']", None, True),
|
||||
(".//div[@class='code-block-caption']/"
|
||||
@ -633,9 +633,9 @@ def test_numfig_without_numbered_toctree_warn(app, warning):
|
||||
|
||||
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||
'index.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 9 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 10 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 9 $', True),
|
||||
@ -657,13 +657,13 @@ def test_numfig_without_numbered_toctree_warn(app, warning):
|
||||
(".//li/p/code/span", '^Sect.{number}$', True),
|
||||
],
|
||||
'foo.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 3 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 4 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 1 $', True),
|
||||
@ -683,11 +683,11 @@ def test_numfig_without_numbered_toctree_warn(app, warning):
|
||||
"span[@class='caption-number']", '^Listing 4 $', True),
|
||||
],
|
||||
'bar.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 5 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 7 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 8 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 5 $', True),
|
||||
@ -703,7 +703,7 @@ def test_numfig_without_numbered_toctree_warn(app, warning):
|
||||
"span[@class='caption-number']", '^Listing 8 $', True),
|
||||
],
|
||||
'baz.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 6 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 6 $', True),
|
||||
@ -741,9 +741,9 @@ def test_numfig_with_numbered_toctree_warn(app, warning):
|
||||
|
||||
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||
'index.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 1 $', True),
|
||||
@ -765,13 +765,13 @@ def test_numfig_with_numbered_toctree_warn(app, warning):
|
||||
(".//li/p/a/span", '^Sect.1 Foo$', True),
|
||||
],
|
||||
'foo.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.2 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.3 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.4 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 1.1 $', True),
|
||||
@ -791,11 +791,11 @@ def test_numfig_with_numbered_toctree_warn(app, warning):
|
||||
"span[@class='caption-number']", '^Listing 1.4 $', True),
|
||||
],
|
||||
'bar.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.3 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.4 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 2.1 $', True),
|
||||
@ -811,7 +811,7 @@ def test_numfig_with_numbered_toctree_warn(app, warning):
|
||||
"span[@class='caption-number']", '^Listing 2.4 $', True),
|
||||
],
|
||||
'baz.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.2 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 2.2 $', True),
|
||||
@ -846,9 +846,9 @@ def test_numfig_with_prefix_warn(app, warning):
|
||||
|
||||
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||
'index.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:2 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Tab_1 $', True),
|
||||
@ -870,13 +870,13 @@ def test_numfig_with_prefix_warn(app, warning):
|
||||
(".//li/p/a/span", '^Sect.1 Foo$', True),
|
||||
],
|
||||
'foo.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:1.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:1.2 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:1.3 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:1.4 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Tab_1.1 $', True),
|
||||
@ -896,11 +896,11 @@ def test_numfig_with_prefix_warn(app, warning):
|
||||
"span[@class='caption-number']", '^Code-1.4 $', True),
|
||||
],
|
||||
'bar.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:2.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:2.3 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:2.4 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Tab_2.1 $', True),
|
||||
@ -916,7 +916,7 @@ def test_numfig_with_prefix_warn(app, warning):
|
||||
"span[@class='caption-number']", '^Code-2.4 $', True),
|
||||
],
|
||||
'baz.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Figure:2.2 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Tab_2.2 $', True),
|
||||
@ -952,9 +952,9 @@ def test_numfig_with_secnum_depth_warn(app, warning):
|
||||
|
||||
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||
'index.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 1 $', True),
|
||||
@ -976,13 +976,13 @@ def test_numfig_with_secnum_depth_warn(app, warning):
|
||||
(".//li/p/a/span", '^Sect.1 Foo$', True),
|
||||
],
|
||||
'foo.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.1.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.1.2 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.2.1 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 1.1 $', True),
|
||||
@ -1002,11 +1002,11 @@ def test_numfig_with_secnum_depth_warn(app, warning):
|
||||
"span[@class='caption-number']", '^Listing 1.2.1 $', True),
|
||||
],
|
||||
'bar.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.1.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.1.3 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.2.1 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 2.1.1 $', True),
|
||||
@ -1022,7 +1022,7 @@ def test_numfig_with_secnum_depth_warn(app, warning):
|
||||
"span[@class='caption-number']", '^Listing 2.2.1 $', True),
|
||||
],
|
||||
'baz.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.1.2 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 2.1.2 $', True),
|
||||
@ -1043,9 +1043,9 @@ def test_numfig_with_secnum_depth(app, cached_etree_parse, fname, expect):
|
||||
|
||||
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||
'index.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 1 $', True),
|
||||
@ -1065,13 +1065,13 @@ def test_numfig_with_secnum_depth(app, cached_etree_parse, fname, expect):
|
||||
(".//li/p/a/span", '^Section.2.1$', True),
|
||||
(".//li/p/a/span", '^Fig.1 should be Fig.1$', True),
|
||||
(".//li/p/a/span", '^Sect.1 Foo$', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.2 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.3 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 1.4 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 1.1 $', True),
|
||||
@ -1089,11 +1089,11 @@ def test_numfig_with_secnum_depth(app, cached_etree_parse, fname, expect):
|
||||
"span[@class='caption-number']", '^Listing 1.3 $', True),
|
||||
(".//div[@class='code-block-caption']/"
|
||||
"span[@class='caption-number']", '^Listing 1.4 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.1 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.3 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.4 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 2.1 $', True),
|
||||
@ -1107,7 +1107,7 @@ def test_numfig_with_secnum_depth(app, cached_etree_parse, fname, expect):
|
||||
"span[@class='caption-number']", '^Listing 2.3 $', True),
|
||||
(".//div[@class='code-block-caption']/"
|
||||
"span[@class='caption-number']", '^Listing 2.4 $', True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']/"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']/"
|
||||
"span[@class='caption-number']", '^Fig. 2.2 $', True),
|
||||
(".//table/caption/span[@class='caption-number']",
|
||||
'^Table 2.2 $', True),
|
||||
@ -1126,11 +1126,11 @@ def test_numfig_with_singlehtml(app, cached_etree_parse, fname, expect):
|
||||
|
||||
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||
'index.html': [
|
||||
(".//div[@class='figure align-center']/p[@class='caption']"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']"
|
||||
"/span[@class='caption-number']", "Fig. 1", True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']"
|
||||
"/span[@class='caption-number']", "Fig. 2", True),
|
||||
(".//div[@class='figure align-center']/p[@class='caption']"
|
||||
(".//div[@class='figure align-default']/p[@class='caption']"
|
||||
"/span[@class='caption-number']", "Fig. 3", True),
|
||||
(".//div//span[@class='caption-number']", "No.1 ", True),
|
||||
(".//div//span[@class='caption-number']", "No.2 ", True),
|
||||
@ -1338,7 +1338,7 @@ def test_html_sidebar(app, status, warning):
|
||||
assert '<h1 class="logo"><a href="#">Python</a></h1>' in result
|
||||
assert '<h3>Navigation</h3>' in result
|
||||
assert '<h3>Related Topics</h3>' in result
|
||||
assert '<h3>Quick search</h3>' in result
|
||||
assert '<h3 id="searchlabel">Quick search</h3>' in result
|
||||
|
||||
app.builder.add_sidebars('index', ctx)
|
||||
assert ctx['sidebars'] == ['about.html', 'navigation.html', 'relations.html',
|
||||
@ -1353,7 +1353,7 @@ def test_html_sidebar(app, status, warning):
|
||||
assert '<h1 class="logo"><a href="#">Python</a></h1>' not in result
|
||||
assert '<h3>Navigation</h3>' not in result
|
||||
assert '<h3>Related Topics</h3>' in result
|
||||
assert '<h3>Quick search</h3>' not in result
|
||||
assert '<h3 id="searchlabel">Quick search</h3>' not in result
|
||||
|
||||
app.builder.add_sidebars('index', ctx)
|
||||
assert ctx['sidebars'] == ['relations.html']
|
||||
@ -1367,7 +1367,7 @@ def test_html_sidebar(app, status, warning):
|
||||
assert '<h1 class="logo"><a href="#">Python</a></h1>' not in result
|
||||
assert '<h3>Navigation</h3>' not in result
|
||||
assert '<h3>Related Topics</h3>' not in result
|
||||
assert '<h3>Quick search</h3>' not in result
|
||||
assert '<h3 id="searchlabel">Quick search</h3>' not in result
|
||||
|
||||
app.builder.add_sidebars('index', ctx)
|
||||
assert ctx['sidebars'] == []
|
||||
|
@ -59,3 +59,10 @@ def test_default_man_pages():
    expected = [('index', 'stasi', 'STASI™ Documentation 1.0',
                 ["Wolfgang Schäuble & G'Beckstein"], 1)]
    assert default_man_pages(config) == expected


@pytest.mark.sphinx('man', testroot='markup-rubric')
def test_rubric(app, status, warning):
    app.build()
    content = (app.outdir / 'python.1').text()
    assert 'This is a rubric\n' in content
@ -755,6 +755,20 @@ def test_attributes():
    check('member', 'int *[[attr]] *i', {1: 'i__iPP', 2: '1i'})


def test_xref_parsing():
    def check(target):
        class Config:
            cpp_id_attributes = ["id_attr"]
            cpp_paren_attributes = ["paren_attr"]
        parser = DefinitionParser(target, None, Config())
        ast, isShorthand = parser.parse_xref_object()
        parser.assert_end()
    check('f')
    check('f()')
    check('void f()')
    check('T f()')


# def test_print():
#     # used for getting all the ids out for checking
#     for a in ids:
@ -290,3 +290,173 @@ def test_pyobject_prefix(app):
                                          desc)])]))
    assert doctree[1][1][1].astext().strip() == 'say'  # prefix is stripped
    assert doctree[1][1][3].astext().strip() == 'FooBar.say'  # not stripped


def test_pydata(app):
    text = ".. py:data:: var\n"
    domain = app.env.get_domain('py')
    doctree = restructuredtext.parse(app, text)
    assert_node(doctree, (addnodes.index,
                          [desc, ([desc_signature, desc_name, "var"],
                                  [desc_content, ()])]))
    assert 'var' in domain.objects
    assert domain.objects['var'] == ('index', 'data')


def test_pyfunction(app):
    text = (".. py:function:: func1\n"
            ".. py:function:: func2\n"
            "   :async:\n")
    domain = app.env.get_domain('py')
    doctree = restructuredtext.parse(app, text)
    assert_node(doctree, (addnodes.index,
                          [desc, ([desc_signature, ([desc_name, "func1"],
                                                    [desc_parameterlist, ()])],
                                  [desc_content, ()])],
                          addnodes.index,
                          [desc, ([desc_signature, ([desc_annotation, "async "],
                                                    [desc_name, "func2"],
                                                    [desc_parameterlist, ()])],
                                  [desc_content, ()])]))
    assert 'func1' in domain.objects
    assert domain.objects['func1'] == ('index', 'function')
    assert 'func2' in domain.objects
    assert domain.objects['func2'] == ('index', 'function')


def test_pymethod_options(app):
|
||||
text = (".. py:class:: Class\n"
|
||||
"\n"
|
||||
" .. py:method:: meth1\n"
|
||||
" .. py:method:: meth2\n"
|
||||
" :classmethod:\n"
|
||||
" .. py:method:: meth3\n"
|
||||
" :staticmethod:\n"
|
||||
" .. py:method:: meth4\n"
|
||||
" :async:\n"
|
||||
" .. py:method:: meth5\n"
|
||||
" :property:\n")
|
||||
domain = app.env.get_domain('py')
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, ([desc_annotation, "class "],
|
||||
[desc_name, "Class"])],
|
||||
[desc_content, (addnodes.index,
|
||||
desc,
|
||||
addnodes.index,
|
||||
desc,
|
||||
addnodes.index,
|
||||
desc,
|
||||
addnodes.index,
|
||||
desc,
|
||||
addnodes.index,
|
||||
desc)])]))
|
||||
|
||||
# method
|
||||
assert_node(doctree[1][1][0], addnodes.index,
|
||||
entries=[('single', 'meth1() (Class method)', 'Class.meth1', '', None)])
|
||||
assert_node(doctree[1][1][1], ([desc_signature, ([desc_name, "meth1"],
|
||||
[desc_parameterlist, ()])],
|
||||
[desc_content, ()]))
|
||||
assert 'Class.meth1' in domain.objects
|
||||
assert domain.objects['Class.meth1'] == ('index', 'method')
|
||||
|
||||
# :classmethod:
|
||||
assert_node(doctree[1][1][2], addnodes.index,
|
||||
entries=[('single', 'meth2() (Class class method)', 'Class.meth2', '', None)])
|
||||
assert_node(doctree[1][1][3], ([desc_signature, ([desc_annotation, "classmethod "],
|
||||
[desc_name, "meth2"],
|
||||
[desc_parameterlist, ()])],
|
||||
[desc_content, ()]))
|
||||
assert 'Class.meth2' in domain.objects
|
||||
assert domain.objects['Class.meth2'] == ('index', 'method')
|
||||
|
||||
# :staticmethod:
|
||||
assert_node(doctree[1][1][4], addnodes.index,
|
||||
entries=[('single', 'meth3() (Class static method)', 'Class.meth3', '', None)])
|
||||
assert_node(doctree[1][1][5], ([desc_signature, ([desc_annotation, "static "],
|
||||
[desc_name, "meth3"],
|
||||
[desc_parameterlist, ()])],
|
||||
[desc_content, ()]))
|
||||
assert 'Class.meth3' in domain.objects
|
||||
assert domain.objects['Class.meth3'] == ('index', 'method')
|
||||
|
||||
# :async:
|
||||
assert_node(doctree[1][1][6], addnodes.index,
|
||||
entries=[('single', 'meth4() (Class method)', 'Class.meth4', '', None)])
|
||||
assert_node(doctree[1][1][7], ([desc_signature, ([desc_annotation, "async "],
|
||||
[desc_name, "meth4"],
|
||||
[desc_parameterlist, ()])],
|
||||
[desc_content, ()]))
|
||||
assert 'Class.meth4' in domain.objects
|
||||
assert domain.objects['Class.meth4'] == ('index', 'method')
|
||||
|
||||
# :property:
|
||||
assert_node(doctree[1][1][8], addnodes.index,
|
||||
entries=[('single', 'meth5() (Class property)', 'Class.meth5', '', None)])
|
||||
assert_node(doctree[1][1][9], ([desc_signature, ([desc_annotation, "property "],
|
||||
[desc_name, "meth5"])],
|
||||
[desc_content, ()]))
|
||||
assert 'Class.meth5' in domain.objects
|
||||
assert domain.objects['Class.meth5'] == ('index', 'method')
|
||||
|
||||
|
||||
def test_pyclassmethod(app):
|
||||
text = (".. py:class:: Class\n"
|
||||
"\n"
|
||||
" .. py:classmethod:: meth\n")
|
||||
domain = app.env.get_domain('py')
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, ([desc_annotation, "class "],
|
||||
[desc_name, "Class"])],
|
||||
[desc_content, (addnodes.index,
|
||||
desc)])]))
|
||||
assert_node(doctree[1][1][0], addnodes.index,
|
||||
entries=[('single', 'meth() (Class class method)', 'Class.meth', '', None)])
|
||||
assert_node(doctree[1][1][1], ([desc_signature, ([desc_annotation, "classmethod "],
|
||||
[desc_name, "meth"],
|
||||
[desc_parameterlist, ()])],
|
||||
[desc_content, ()]))
|
||||
assert 'Class.meth' in domain.objects
|
||||
assert domain.objects['Class.meth'] == ('index', 'method')
|
||||
|
||||
|
||||
def test_pystaticmethod(app):
|
||||
text = (".. py:class:: Class\n"
|
||||
"\n"
|
||||
" .. py:staticmethod:: meth\n")
|
||||
domain = app.env.get_domain('py')
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, ([desc_annotation, "class "],
|
||||
[desc_name, "Class"])],
|
||||
[desc_content, (addnodes.index,
|
||||
desc)])]))
|
||||
assert_node(doctree[1][1][0], addnodes.index,
|
||||
entries=[('single', 'meth() (Class static method)', 'Class.meth', '', None)])
|
||||
assert_node(doctree[1][1][1], ([desc_signature, ([desc_annotation, "static "],
|
||||
[desc_name, "meth"],
|
||||
[desc_parameterlist, ()])],
|
||||
[desc_content, ()]))
|
||||
assert 'Class.meth' in domain.objects
|
||||
assert domain.objects['Class.meth'] == ('index', 'method')
|
||||
|
||||
|
||||
def test_pyattribute(app):
|
||||
text = (".. py:class:: Class\n"
|
||||
"\n"
|
||||
" .. py:attribute:: attr\n")
|
||||
domain = app.env.get_domain('py')
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, ([desc_annotation, "class "],
|
||||
[desc_name, "Class"])],
|
||||
[desc_content, (addnodes.index,
|
||||
desc)])]))
|
||||
assert_node(doctree[1][1][0], addnodes.index,
|
||||
entries=[('single', 'attr (Class attribute)', 'Class.attr', '', None)])
|
||||
assert_node(doctree[1][1][1], ([desc_signature, desc_name, "attr"],
|
||||
[desc_content, ()]))
|
||||
assert 'Class.attr' in domain.objects
|
||||
assert domain.objects['Class.attr'] == ('index', 'attribute')
|
||||
|
@ -8,7 +8,13 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.addnodes import (
|
||||
desc, desc_addname, desc_annotation, desc_content, desc_name, desc_signature
|
||||
)
|
||||
from sphinx.domains.rst import parse_directive
|
||||
from sphinx.testing import restructuredtext
|
||||
from sphinx.testing.util import assert_node
|
||||
|
||||
|
||||
def test_parse_directive():
|
||||
@ -16,10 +22,119 @@ def test_parse_directive():
|
||||
assert s == ('foö', '')
|
||||
|
||||
s = parse_directive(' .. foö :: ')
|
||||
assert s == ('foö', ' ')
|
||||
assert s == ('foö', '')
|
||||
|
||||
s = parse_directive('.. foö:: args1 args2')
|
||||
assert s == ('foö', ' args1 args2')
|
||||
|
||||
s = parse_directive('.. :: bar')
|
||||
assert s == ('.. :: bar', '')
|
||||
|
||||
|
||||
def test_rst_directive(app):
|
||||
# bare
|
||||
text = ".. rst:directive:: toctree"
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, desc_name, ".. toctree::"],
|
||||
[desc_content, ()])]))
|
||||
assert_node(doctree[0],
|
||||
entries=[("single", "toctree (directive)", "directive-toctree", "", None)])
|
||||
assert_node(doctree[1], addnodes.desc, desctype="directive",
|
||||
domain="rst", objtype="directive", noindex=False)
|
||||
|
||||
# decorated
|
||||
text = ".. rst:directive:: .. toctree::"
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, desc_name, ".. toctree::"],
|
||||
[desc_content, ()])]))
|
||||
assert_node(doctree[0],
|
||||
entries=[("single", "toctree (directive)", "directive-toctree", "", None)])
|
||||
assert_node(doctree[1], addnodes.desc, desctype="directive",
|
||||
domain="rst", objtype="directive", noindex=False)
|
||||
|
||||
|
||||
def test_rst_directive_with_argument(app):
|
||||
text = ".. rst:directive:: .. toctree:: foo bar baz"
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, ([desc_name, ".. toctree::"],
|
||||
[desc_addname, " foo bar baz"])],
|
||||
[desc_content, ()])]))
|
||||
assert_node(doctree[0],
|
||||
entries=[("single", "toctree (directive)", "directive-toctree", "", None)])
|
||||
assert_node(doctree[1], addnodes.desc, desctype="directive",
|
||||
domain="rst", objtype="directive", noindex=False)
|
||||
|
||||
|
||||
def test_rst_directive_option(app):
|
||||
text = ".. rst:directive:option:: foo"
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, desc_name, ":foo:"],
|
||||
[desc_content, ()])]))
|
||||
assert_node(doctree[0],
|
||||
entries=[("single", ":foo: (directive option)",
|
||||
"directive:option--foo", "", "F")])
|
||||
assert_node(doctree[1], addnodes.desc, desctype="directive:option",
|
||||
domain="rst", objtype="directive:option", noindex=False)
|
||||
|
||||
|
||||
def test_rst_directive_option_with_argument(app):
|
||||
text = ".. rst:directive:option:: foo: bar baz"
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, ([desc_name, ":foo:"],
|
||||
[desc_annotation, " bar baz"])],
|
||||
[desc_content, ()])]))
|
||||
assert_node(doctree[0],
|
||||
entries=[("single", ":foo: (directive option)",
|
||||
"directive:option--foo", "", "F")])
|
||||
assert_node(doctree[1], addnodes.desc, desctype="directive:option",
|
||||
domain="rst", objtype="directive:option", noindex=False)
|
||||
|
||||
|
||||
def test_rst_directive_option_type(app):
|
||||
text = (".. rst:directive:option:: foo\n"
|
||||
" :type: directives.flags\n")
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, ([desc_name, ":foo:"],
|
||||
[desc_annotation, " (directives.flags)"])],
|
||||
[desc_content, ()])]))
|
||||
assert_node(doctree[0],
|
||||
entries=[("single", ":foo: (directive option)",
|
||||
"directive:option--foo", "", "F")])
|
||||
assert_node(doctree[1], addnodes.desc, desctype="directive:option",
|
||||
domain="rst", objtype="directive:option", noindex=False)
|
||||
|
||||
|
||||
def test_rst_directive_and_directive_option(app):
|
||||
text = (".. rst:directive:: foo\n"
|
||||
"\n"
|
||||
" .. rst:directive:option:: bar\n")
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, desc_name, ".. foo::"],
|
||||
[desc_content, (addnodes.index,
|
||||
desc)])]))
|
||||
assert_node(doctree[1][1][0],
|
||||
entries=[("pair", "foo (directive); :bar: (directive option)",
|
||||
"directive:option-foo-bar", "", "B")])
|
||||
assert_node(doctree[1][1][1], ([desc_signature, desc_name, ":bar:"],
|
||||
[desc_content, ()]))
|
||||
assert_node(doctree[1][1][1], addnodes.desc, desctype="directive:option",
|
||||
domain="rst", objtype="directive:option", noindex=False)
|
||||
|
||||
|
||||
def test_rst_role(app):
|
||||
text = ".. rst:role:: ref"
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, desc_name, ":ref:"],
|
||||
[desc_content, ()])]))
|
||||
assert_node(doctree[0],
|
||||
entries=[("single", "ref (role)", "role-ref", "", None)])
|
||||
assert_node(doctree[1], addnodes.desc, desctype="role",
|
||||
domain="rst", objtype="role", noindex=False)
|
||||
|
@ -11,8 +11,12 @@
|
||||
from unittest import mock
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import definition, definition_list, definition_list_item, term
|
||||
|
||||
from sphinx.addnodes import glossary, index
|
||||
from sphinx.domains.std import StandardDomain
|
||||
from sphinx.testing import restructuredtext
|
||||
from sphinx.testing.util import assert_node
|
||||
|
||||
|
||||
def test_process_doc_handle_figure_caption():
|
||||
@ -80,3 +84,158 @@ def test_get_full_qualified_name():
|
||||
kwargs = {'std:program': 'ls'}
|
||||
node = nodes.reference(reftype='option', reftarget='-l', **kwargs)
|
||||
assert domain.get_full_qualified_name(node) == 'ls.-l'
|
||||
|
||||
|
||||
def test_glossary(app):
|
||||
text = (".. glossary::\n"
|
||||
"\n"
|
||||
" term1\n"
|
||||
" term2\n"
|
||||
" description\n"
|
||||
"\n"
|
||||
" term3 : classifier\n"
|
||||
" description\n"
|
||||
" description\n"
|
||||
"\n"
|
||||
" term4 : class1 : class2\n"
|
||||
" description\n")
|
||||
|
||||
# doctree
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (
|
||||
[glossary, definition_list, ([definition_list_item, ([term, ("term1",
|
||||
index)],
|
||||
[term, ("term2",
|
||||
index)],
|
||||
definition)],
|
||||
[definition_list_item, ([term, ("term3",
|
||||
index)],
|
||||
definition)],
|
||||
[definition_list_item, ([term, ("term4",
|
||||
index)],
|
||||
definition)])],
|
||||
))
|
||||
assert_node(doctree[0][0][0][0][1],
|
||||
entries=[("single", "term1", "term-term1", "main", None)])
|
||||
assert_node(doctree[0][0][0][1][1],
|
||||
entries=[("single", "term2", "term-term2", "main", None)])
|
||||
assert_node(doctree[0][0][0][2],
|
||||
[definition, nodes.paragraph, "description"])
|
||||
assert_node(doctree[0][0][1][0][1],
|
||||
entries=[("single", "term3", "term-term3", "main", "classifier")])
|
||||
assert_node(doctree[0][0][1][1],
|
||||
[definition, nodes.paragraph, ("description\n"
|
||||
"description")])
|
||||
assert_node(doctree[0][0][2][0][1],
|
||||
entries=[("single", "term4", "term-term4", "main", "class1")])
|
||||
assert_node(doctree[0][0][2][1],
|
||||
[nodes.definition, nodes.paragraph, "description"])
|
||||
|
||||
# index
|
||||
objects = list(app.env.get_domain("std").get_objects())
|
||||
assert ("term1", "term1", "term", "index", "term-term1", -1) in objects
|
||||
assert ("term2", "term2", "term", "index", "term-term2", -1) in objects
|
||||
assert ("term3", "term3", "term", "index", "term-term3", -1) in objects
|
||||
assert ("term4", "term4", "term", "index", "term-term4", -1) in objects
|
||||
|
||||
|
||||
def test_glossary_warning(app, status, warning):
|
||||
# empty line between terms
|
||||
text = (".. glossary::\n"
|
||||
"\n"
|
||||
" term1\n"
|
||||
"\n"
|
||||
" term2\n")
|
||||
restructuredtext.parse(app, text, "case1")
|
||||
assert ("case1.rst:4: WARNING: glossary terms must not be separated by empty lines"
|
||||
in warning.getvalue())
|
||||
|
||||
# glossary starts with indented item
|
||||
text = (".. glossary::\n"
|
||||
"\n"
|
||||
" description\n"
|
||||
" term\n")
|
||||
restructuredtext.parse(app, text, "case2")
|
||||
assert ("case2.rst:3: WARNING: glossary term must be preceded by empty line"
|
||||
in warning.getvalue())
|
||||
|
||||
# empty line between terms
|
||||
text = (".. glossary::\n"
|
||||
"\n"
|
||||
" term1\n"
|
||||
" description\n"
|
||||
" term2\n")
|
||||
restructuredtext.parse(app, text, "case3")
|
||||
assert ("case3.rst:4: WARNING: glossary term must be preceded by empty line"
|
||||
in warning.getvalue())
|
||||
|
||||
|
||||
def test_glossary_comment(app):
|
||||
text = (".. glossary::\n"
|
||||
"\n"
|
||||
" term1\n"
|
||||
" description\n"
|
||||
" .. term2\n"
|
||||
" description\n"
|
||||
" description\n")
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (
|
||||
[glossary, definition_list, definition_list_item, ([term, ("term1",
|
||||
index)],
|
||||
definition)],
|
||||
))
|
||||
assert_node(doctree[0][0][0][1],
|
||||
[nodes.definition, nodes.paragraph, "description"])
|
||||
|
||||
|
||||
def test_glossary_comment2(app):
|
||||
text = (".. glossary::\n"
|
||||
"\n"
|
||||
" term1\n"
|
||||
" description\n"
|
||||
"\n"
|
||||
" .. term2\n"
|
||||
" term3\n"
|
||||
" description\n"
|
||||
" description\n")
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (
|
||||
[glossary, definition_list, ([definition_list_item, ([term, ("term1",
|
||||
index)],
|
||||
definition)],
|
||||
[definition_list_item, ([term, ("term3",
|
||||
index)],
|
||||
definition)])],
|
||||
))
|
||||
assert_node(doctree[0][0][0][1],
|
||||
[nodes.definition, nodes.paragraph, "description"])
|
||||
assert_node(doctree[0][0][1][1],
|
||||
[nodes.definition, nodes.paragraph, ("description\n"
|
||||
"description")])
|
||||
|
||||
|
||||
def test_glossary_sorted(app):
|
||||
text = (".. glossary::\n"
|
||||
" :sorted:\n"
|
||||
"\n"
|
||||
" term3\n"
|
||||
" description\n"
|
||||
"\n"
|
||||
" term2\n"
|
||||
" term1\n"
|
||||
" description\n")
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (
|
||||
[glossary, definition_list, ([definition_list_item, ([term, ("term2",
|
||||
index)],
|
||||
[term, ("term1",
|
||||
index)],
|
||||
definition)],
|
||||
[definition_list_item, ([term, ("term3",
|
||||
index)],
|
||||
definition)])],
|
||||
))
|
||||
assert_node(doctree[0][0][0][2],
|
||||
[nodes.definition, nodes.paragraph, "description"])
|
||||
assert_node(doctree[0][0][1][1],
|
||||
[nodes.definition, nodes.paragraph, "description"])
|
||||
|
@ -8,135 +8,116 @@
    :license: BSD, see LICENSE for details.
"""

from collections import namedtuple
from unittest import mock
import pytest

from sphinx import locale
from sphinx.environment.adapters.indexentries import IndexEntries

Environment = namedtuple('Environment', 'indexentries')

dummy_builder = mock.Mock()
dummy_builder.get_relative_uri.return_value = ''
from sphinx.testing import restructuredtext


def test_create_single_index():
    # type, value, tid, main, index_key
    env = Environment({
        'index': [
            ('single', 'docutils', 'id1', '', None),
            ('single', 'Python', 'id2', '', None),
            ('single', 'pip; install', 'id3', '', None),
            ('single', 'pip; upgrade', 'id4', '', None),
            ('single', 'Sphinx', 'id5', '', None),
            ('single', 'Ель', 'id6', '', None),
            ('single', 'ёлка', 'id7', '', None),
            ('single', 'תירבע', 'id8', '', None),
            ('single', '9-symbol', 'id9', '', None),
            ('single', '&-symbol', 'id10', '', None),
        ],
    })
    index = IndexEntries(env).create_index(dummy_builder)
@pytest.mark.sphinx('dummy')
def test_create_single_index(app):
    app.env.indexentries.clear()
    text = (".. index:: docutils\n"
            ".. index:: Python\n"
            ".. index:: pip; install\n"
            ".. index:: pip; upgrade\n"
            ".. index:: Sphinx\n"
            ".. index:: Ель\n"
            ".. index:: ёлка\n"
            ".. index:: תירבע\n"
            ".. index:: 9-symbol\n"
            ".. index:: &-symbol\n")
    restructuredtext.parse(app, text)
    index = IndexEntries(app.env).create_index(app.builder)
    assert len(index) == 6
    assert index[0] == ('Symbols', [('&-symbol', [[('', '#id10')], [], None]),
                                    ('9-symbol', [[('', '#id9')], [], None])])
    assert index[1] == ('D', [('docutils', [[('', '#id1')], [], None])])
    assert index[2] == ('P', [('pip', [[], [('install', [('', '#id3')]),
                                            ('upgrade', [('', '#id4')])], None]),
                              ('Python', [[('', '#id2')], [], None])])
    assert index[3] == ('S', [('Sphinx', [[('', '#id5')], [], None])])
    assert index[4] == ('Е', [('ёлка', [[('', '#id7')], [], None]),
                              ('Ель', [[('', '#id6')], [], None])])
    assert index[5] == ('ת', [('תירבע', [[('', '#id8')], [], None])])
    assert index[0] == ('Symbols', [('&-symbol', [[('', '#index-9')], [], None]),
                                    ('9-symbol', [[('', '#index-8')], [], None])])
    assert index[1] == ('D', [('docutils', [[('', '#index-0')], [], None])])
    assert index[2] == ('P', [('pip', [[], [('install', [('', '#index-2')]),
                                            ('upgrade', [('', '#index-3')])], None]),
                              ('Python', [[('', '#index-1')], [], None])])
    assert index[3] == ('S', [('Sphinx', [[('', '#index-4')], [], None])])
    assert index[4] == ('Е', [('ёлка', [[('', '#index-6')], [], None]),
                              ('Ель', [[('', '#index-5')], [], None])])
    assert index[5] == ('ת', [('תירבע', [[('', '#index-7')], [], None])])
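A note for readers decoding the nested lists in these assertions: each item returned by create_index() is a (letter, entries) pair, and each entry value is a three-item list of direct links, subentries, and the optional index key. The annotated sketch below is illustrative only, mirroring the assertions above rather than adding a new test:

# Sketch of the shape of one create_index() bucket (values are illustrative):
# (letter, [(entry_name, [links, subentries, index_key]), ...])
bucket = ('P', [('pip', [[],                                # no direct links for "pip" itself
                         [('install', [('', '#index-2')]),  # subentries: (name, [(main, anchor), ...])
                          ('upgrade', [('', '#index-3')])],
                         None])])                           # no explicit index key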


def test_create_pair_index():
    # type, value, tid, main, index_key
    env = Environment({
        'index': [
            ('pair', 'docutils; reStructuredText', 'id1', '', None),
            ('pair', 'Python; interpreter', 'id2', '', None),
            ('pair', 'Sphinx; documentation tool', 'id3', '', None),
        ],
    })
    index = IndexEntries(env).create_index(dummy_builder)
@pytest.mark.sphinx('dummy')
def test_create_pair_index(app):
    app.env.indexentries.clear()
    text = (".. index:: pair: docutils; reStructuredText\n"
            ".. index:: pair: Python; interpreter\n"
            ".. index:: pair: Sphinx; documentation tool\n")
    restructuredtext.parse(app, text)
    index = IndexEntries(app.env).create_index(app.builder)
    assert len(index) == 5
    assert index[0] == ('D',
                        [('documentation tool', [[], [('Sphinx', [('', '#id3')])], None]),
                         ('docutils', [[], [('reStructuredText', [('', '#id1')])], None])])
    assert index[1] == ('I', [('interpreter', [[], [('Python', [('', '#id2')])], None])])
    assert index[2] == ('P', [('Python', [[], [('interpreter', [('', '#id2')])], None])])
                        [('documentation tool', [[], [('Sphinx', [('', '#index-2')])], None]),
                         ('docutils', [[], [('reStructuredText', [('', '#index-0')])], None])])
    assert index[1] == ('I', [('interpreter', [[], [('Python', [('', '#index-1')])], None])])
    assert index[2] == ('P', [('Python', [[], [('interpreter', [('', '#index-1')])], None])])
    assert index[3] == ('R',
                        [('reStructuredText', [[], [('docutils', [('', '#id1')])], None])])
                        [('reStructuredText', [[], [('docutils', [('', '#index-0')])], None])])
    assert index[4] == ('S',
                        [('Sphinx', [[], [('documentation tool', [('', '#id3')])], None])])
                        [('Sphinx', [[], [('documentation tool', [('', '#index-2')])], None])])


def test_create_triple_index():
    # type, value, tid, main, index_key
    env = Environment({
        'index': [
            ('triple', 'foo; bar; baz', 'id1', '', None),
            ('triple', 'Python; Sphinx; reST', 'id2', '', None),
        ],
    })
    index = IndexEntries(env).create_index(dummy_builder)
@pytest.mark.sphinx('dummy')
def test_create_triple_index(app):
    app.env.indexentries.clear()
    text = (".. index:: triple: foo; bar; baz\n"
            ".. index:: triple: Python; Sphinx; reST\n")
    restructuredtext.parse(app, text)
    index = IndexEntries(app.env).create_index(app.builder)
    assert len(index) == 5
    assert index[0] == ('B', [('bar', [[], [('baz, foo', [('', '#id1')])], None]),
                              ('baz', [[], [('foo bar', [('', '#id1')])], None])])
    assert index[1] == ('F', [('foo', [[], [('bar baz', [('', '#id1')])], None])])
    assert index[2] == ('P', [('Python', [[], [('Sphinx reST', [('', '#id2')])], None])])
    assert index[3] == ('R', [('reST', [[], [('Python Sphinx', [('', '#id2')])], None])])
    assert index[4] == ('S', [('Sphinx', [[], [('reST, Python', [('', '#id2')])], None])])
    assert index[0] == ('B', [('bar', [[], [('baz, foo', [('', '#index-0')])], None]),
                              ('baz', [[], [('foo bar', [('', '#index-0')])], None])])
    assert index[1] == ('F', [('foo', [[], [('bar baz', [('', '#index-0')])], None])])
    assert index[2] == ('P', [('Python', [[], [('Sphinx reST', [('', '#index-1')])], None])])
    assert index[3] == ('R', [('reST', [[], [('Python Sphinx', [('', '#index-1')])], None])])
    assert index[4] == ('S', [('Sphinx', [[], [('reST, Python', [('', '#index-1')])], None])])


def test_create_see_index():
    locale.init([], None)

    # type, value, tid, main, index_key
    env = Environment({
        'index': [
            ('see', 'docutils; reStructuredText', 'id1', '', None),
            ('see', 'Python; interpreter', 'id2', '', None),
            ('see', 'Sphinx; documentation tool', 'id3', '', None),
        ],
    })
    index = IndexEntries(env).create_index(dummy_builder)
@pytest.mark.sphinx('dummy')
def test_create_see_index(app):
    app.env.indexentries.clear()
    text = (".. index:: see: docutils; reStructuredText\n"
            ".. index:: see: Python; interpreter\n"
            ".. index:: see: Sphinx; documentation tool\n")
    restructuredtext.parse(app, text)
    index = IndexEntries(app.env).create_index(app.builder)
    assert len(index) == 3
    assert index[0] == ('D', [('docutils', [[], [('see reStructuredText', [])], None])])
    assert index[1] == ('P', [('Python', [[], [('see interpreter', [])], None])])
    assert index[2] == ('S', [('Sphinx', [[], [('see documentation tool', [])], None])])


def test_create_seealso_index():
    locale.init([], None)

    # type, value, tid, main, index_key
    env = Environment({
        'index': [
            ('seealso', 'docutils; reStructuredText', 'id1', '', None),
            ('seealso', 'Python; interpreter', 'id2', '', None),
            ('seealso', 'Sphinx; documentation tool', 'id3', '', None),
        ],
    })
    index = IndexEntries(env).create_index(dummy_builder)
@pytest.mark.sphinx('dummy')
def test_create_seealso_index(app):
    app.env.indexentries.clear()
    text = (".. index:: seealso: docutils; reStructuredText\n"
            ".. index:: seealso: Python; interpreter\n"
            ".. index:: seealso: Sphinx; documentation tool\n")
    restructuredtext.parse(app, text)
    index = IndexEntries(app.env).create_index(app.builder)
    assert len(index) == 3
    assert index[0] == ('D', [('docutils', [[], [('see also reStructuredText', [])], None])])
    assert index[1] == ('P', [('Python', [[], [('see also interpreter', [])], None])])
    assert index[2] == ('S', [('Sphinx', [[], [('see also documentation tool', [])], None])])


def test_create_index_by_key():
    # type, value, tid, main, index_key
    env = Environment({
        'index': [
            ('single', 'docutils', 'id1', '', None),
            ('single', 'Python', 'id2', '', None),
            ('single', 'スフィンクス', 'id3', '', 'ス'),
        ],
    })
    index = IndexEntries(env).create_index(dummy_builder)
@pytest.mark.sphinx('dummy')
def test_create_index_by_key(app):
    app.env.indexentries.clear()
    # at present, only the glossary directive is able to create an index key
    text = (".. glossary::\n"
            "\n"
            "   docutils\n"
            "   Python\n"
            "   スフィンクス : ス\n")
    restructuredtext.parse(app, text)
    index = IndexEntries(app.env).create_index(app.builder)
    assert len(index) == 3
    assert index[0] == ('D', [('docutils', [[('', '#id1')], [], None])])
    assert index[1] == ('P', [('Python', [[('', '#id2')], [], None])])
    assert index[2] == ('ス', [('スフィンクス', [[('', '#id3')], [], 'ス'])])
    assert index[0] == ('D', [('docutils', [[('main', '#term-docutils')], [], None])])
    assert index[1] == ('P', [('Python', [[('main', '#term-python')], [], None])])
    assert index[2] == ('ス', [('スフィンクス', [[('main', '#term-2')], [], 'ス'])])

@ -13,6 +13,7 @@ from collections import namedtuple
import pytest

from sphinx.ext.apidoc import main as apidoc_main
from sphinx.testing.path import path


@pytest.fixture()
@ -398,3 +399,216 @@ def test_subpackage_in_toc(make_app, apidoc):
    assert 'parent.child.foo' in parent_child

    assert (outdir / 'parent.child.foo.rst').isfile()


def test_toc_file(tempdir):
    outdir = path(tempdir)
    (outdir / 'module').makedirs()
    (outdir / 'example.py').write_text('')
    (outdir / 'module' / 'example.py').write_text('')
    apidoc_main(['-o', tempdir, tempdir])
    assert (outdir / 'modules.rst').exists()

    content = (outdir / 'modules.rst').text()
    assert content == ("test_toc_file0\n"
                       "==============\n"
                       "\n"
                       ".. toctree::\n"
                       "   :maxdepth: 4\n"
                       "\n"
                       "   example\n")


def test_module_file(tempdir):
    outdir = path(tempdir)
    (outdir / 'example.py').write_text('')
    apidoc_main(['-o', tempdir, tempdir])
    assert (outdir / 'example.rst').exists()

    content = (outdir / 'example.rst').text()
    assert content == ("example module\n"
                       "==============\n"
                       "\n"
                       ".. automodule:: example\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n")


def test_module_file_noheadings(tempdir):
    outdir = path(tempdir)
    (outdir / 'example.py').write_text('')
    apidoc_main(['--no-headings', '-o', tempdir, tempdir])
    assert (outdir / 'example.rst').exists()

    content = (outdir / 'example.rst').text()
    assert content == (".. automodule:: example\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n")


def test_package_file(tempdir):
    outdir = path(tempdir)
    (outdir / 'testpkg').makedirs()
    (outdir / 'testpkg' / '__init__.py').write_text('')
    (outdir / 'testpkg' / 'example.py').write_text('')
    (outdir / 'testpkg' / 'subpkg').makedirs()
    (outdir / 'testpkg' / 'subpkg' / '__init__.py').write_text('')
    apidoc_main(['-o', tempdir, tempdir / 'testpkg'])
    assert (outdir / 'testpkg.rst').exists()
    assert (outdir / 'testpkg.subpkg.rst').exists()

    content = (outdir / 'testpkg.rst').text()
    assert content == ("testpkg package\n"
                       "===============\n"
                       "\n"
                       "Subpackages\n"
                       "-----------\n"
                       "\n"
                       ".. toctree::\n"
                       "\n"
                       "   testpkg.subpkg\n"
                       "\n"
                       "Submodules\n"
                       "----------\n"
                       "\n"
                       "testpkg.example module\n"
                       "----------------------\n"
                       "\n"
                       ".. automodule:: testpkg.example\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n"
                       "\n"
                       "\n"
                       "Module contents\n"
                       "---------------\n"
                       "\n"
                       ".. automodule:: testpkg\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n")

    content = (outdir / 'testpkg.subpkg.rst').text()
    assert content == ("testpkg.subpkg package\n"
                       "======================\n"
                       "\n"
                       "Module contents\n"
                       "---------------\n"
                       "\n"
                       ".. automodule:: testpkg.subpkg\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n")


def test_package_file_separate(tempdir):
    outdir = path(tempdir)
    (outdir / 'testpkg').makedirs()
    (outdir / 'testpkg' / '__init__.py').write_text('')
    (outdir / 'testpkg' / 'example.py').write_text('')
    apidoc_main(['--separate', '-o', tempdir, tempdir / 'testpkg'])
    assert (outdir / 'testpkg.rst').exists()
    assert (outdir / 'testpkg.example.rst').exists()

    content = (outdir / 'testpkg.rst').text()
    assert content == ("testpkg package\n"
                       "===============\n"
                       "\n"
                       "Submodules\n"
                       "----------\n"
                       "\n"
                       ".. toctree::\n"
                       "\n"
                       "   testpkg.example\n"
                       "\n"
                       "Module contents\n"
                       "---------------\n"
                       "\n"
                       ".. automodule:: testpkg\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n")

    content = (outdir / 'testpkg.example.rst').text()
    assert content == ("testpkg.example module\n"
                       "======================\n"
                       "\n"
                       ".. automodule:: testpkg.example\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n")


def test_package_file_module_first(tempdir):
    outdir = path(tempdir)
    (outdir / 'testpkg').makedirs()
    (outdir / 'testpkg' / '__init__.py').write_text('')
    (outdir / 'testpkg' / 'example.py').write_text('')
    apidoc_main(['--module-first', '-o', tempdir, tempdir])

    content = (outdir / 'testpkg.rst').text()
    assert content == ("testpkg package\n"
                       "===============\n"
                       "\n"
                       ".. automodule:: testpkg\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n"
                       "\n"
                       "Submodules\n"
                       "----------\n"
                       "\n"
                       "testpkg.example module\n"
                       "----------------------\n"
                       "\n"
                       ".. automodule:: testpkg.example\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n"
                       "\n")


def test_package_file_without_submodules(tempdir):
    outdir = path(tempdir)
    (outdir / 'testpkg').makedirs()
    (outdir / 'testpkg' / '__init__.py').write_text('')
    apidoc_main(['-o', tempdir, tempdir / 'testpkg'])
    assert (outdir / 'testpkg.rst').exists()

    content = (outdir / 'testpkg.rst').text()
    assert content == ("testpkg package\n"
                       "===============\n"
                       "\n"
                       "Module contents\n"
                       "---------------\n"
                       "\n"
                       ".. automodule:: testpkg\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n")


def test_namespace_package_file(tempdir):
    outdir = path(tempdir)
    (outdir / 'testpkg').makedirs()
    (outdir / 'testpkg' / 'example.py').write_text('')
    apidoc_main(['--implicit-namespace', '-o', tempdir, tempdir / 'testpkg'])
    assert (outdir / 'testpkg.rst').exists()

    content = (outdir / 'testpkg.rst').text()
    assert content == ("testpkg namespace\n"
                       "=================\n"
                       "\n"
                       "Submodules\n"
                       "----------\n"
                       "\n"
                       "testpkg.example module\n"
                       "----------------------\n"
                       "\n"
                       ".. automodule:: testpkg.example\n"
                       "   :members:\n"
                       "   :undoc-members:\n"
                       "   :show-inheritance:\n"
                       "\n")
@ -379,3 +379,23 @@ def test_autosummary_mock_imports(app, status, warning):
        assert app.env.get_doctree('generated/foo')
    finally:
        sys.modules.pop('foo', None)  # unload foo module


@pytest.mark.sphinx('dummy', testroot='ext-autosummary-imported_members')
def test_autosummary_imported_members(app, status, warning):
    try:
        app.build()
        # generated/autosummary_dummy_package is generated successfully
        assert app.env.get_doctree('generated/autosummary_dummy_package')

        module = (app.srcdir / 'generated' / 'autosummary_dummy_package.rst').text()
        assert ('   .. autosummary::\n'
                '   \n'
                '      Bar\n'
                '   \n' in module)
        assert ('   .. autosummary::\n'
                '   \n'
                '      foo\n'
                '   \n' in module)
    finally:
        sys.modules.pop('autosummary_dummy_package', None)
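The testroot used here exercises the new autosummary_imported_members option. For orientation, a minimal conf.py sketch for enabling it outside the test suite (assuming generated stub pages are wanted; not taken from the test root itself) could look like:

# Sketch only: enable the option exercised by the test above
extensions = ['sphinx.ext.autosummary']
autosummary_generate = True
autosummary_imported_members = True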
@ -21,7 +21,7 @@ def test_graphviz_png_html(app, status, warning):
    app.builder.build_all()

    content = (app.outdir / 'index.html').text()
    html = (r'<div class="figure align-center" .*?>\s*'
    html = (r'<div class="figure align-default" .*?>\s*'
            r'<div class="graphviz"><img .*?/></div>\s*<p class="caption">'
            r'<span class="caption-text">caption of graph</span>.*</p>\s*</div>')
    assert re.search(html, content, re.S)
@ -52,7 +52,7 @@ def test_graphviz_svg_html(app, status, warning):

    content = (app.outdir / 'index.html').text()

    html = (r'<div class=\"figure align-center\" .*?>\n'
    html = (r'<div class=\"figure align-default\" .*?>\n'
            r'<div class="graphviz"><object data=\".*\.svg\".*>\n'
            r'\s*<p class=\"warning\">digraph foo {\n'
            r'bar -> baz\n'
@ -1,129 +0,0 @@
"""
    test_inheritance
    ~~~~~~~~~~~~~~~~

    Tests for :mod:`sphinx.ext.inheritance_diagram` module.

    :copyright: Copyright 2015 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import os

import pytest

from sphinx.ext.inheritance_diagram import InheritanceDiagram


@pytest.mark.sphinx(buildername="html", testroot="inheritance")
@pytest.mark.usefixtures('if_graphviz_found')
def test_inheritance_diagram(app, status, warning):
    # monkey-patch InheritanceDiagram.run() so we can get access to its
    # results.
    orig_run = InheritanceDiagram.run
    graphs = {}

    def new_run(self):
        result = orig_run(self)
        node = result[0]
        source = os.path.basename(node.document.current_source).replace(".rst", "")
        graphs[source] = node['graph']
        return result

    InheritanceDiagram.run = new_run

    try:
        app.builder.build_all()
    finally:
        InheritanceDiagram.run = orig_run

    assert app.statuscode == 0

    html_warnings = warning.getvalue()
    assert html_warnings == ""

    # note: it is better to split these asserts into separate test functions
    # but I can't figure out how to build only a specific .rst file

    # basic inheritance diagram showing all classes
    for cls in graphs['basic_diagram'].class_info:
        # use in b/c traversing order is different sometimes
        assert cls in [
            ('dummy.test.A', 'dummy.test.A', [], None),
            ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
            ('dummy.test.C', 'dummy.test.C', ['dummy.test.A'], None),
            ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
            ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
            ('dummy.test.B', 'dummy.test.B', ['dummy.test.A'], None)
        ]

    # inheritance diagram using :parts: 1 option
    for cls in graphs['diagram_w_parts'].class_info:
        assert cls in [
            ('A', 'dummy.test.A', [], None),
            ('F', 'dummy.test.F', ['C'], None),
            ('C', 'dummy.test.C', ['A'], None),
            ('E', 'dummy.test.E', ['B'], None),
            ('D', 'dummy.test.D', ['B', 'C'], None),
            ('B', 'dummy.test.B', ['A'], None)
        ]

    # inheritance diagram with 1 top class
    # :top-classes: dummy.test.B
    # rendering should be
    #       A
    #        \
    #     B   C
    #    / \ / \
    #   E   D   F
    #
    for cls in graphs['diagram_w_1_top_class'].class_info:
        assert cls in [
            ('dummy.test.A', 'dummy.test.A', [], None),
            ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
            ('dummy.test.C', 'dummy.test.C', ['dummy.test.A'], None),
            ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
            ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
            ('dummy.test.B', 'dummy.test.B', [], None)
        ]

    # inheritance diagram with 2 top classes
    # :top-classes: dummy.test.B, dummy.test.C
    # Note: we're specifying separate classes, not the entire module here
    # rendering should be
    #
    #     B   C
    #    / \ / \
    #   E   D   F
    #
    for cls in graphs['diagram_w_2_top_classes'].class_info:
        assert cls in [
            ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
            ('dummy.test.C', 'dummy.test.C', [], None),
            ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
            ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
            ('dummy.test.B', 'dummy.test.B', [], None)
        ]

    # inheritance diagram with 2 top classes and specifying the entire module
    # rendering should be
    #
    #       A
    #     B   C
    #    / \ / \
    #   E   D   F
    #
    # Note: dummy.test.A is included in the graph before its descendants are even processed
    # b/c we've specified to load the entire module. The way InheritanceGraph works it is very
    # hard to exclude parent classes once they have been included in the graph.
    # If you'd like to not show class A in the graph don't specify the entire module.
    # this is a known issue.
    for cls in graphs['diagram_module_w_2_top_classes'].class_info:
        assert cls in [
            ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
            ('dummy.test.C', 'dummy.test.C', [], None),
            ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
            ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
            ('dummy.test.B', 'dummy.test.B', [], None),
            ('dummy.test.A', 'dummy.test.A', [], None),
        ]
@ -9,11 +9,128 @@
"""

import re
import os
import sys

import pytest

from sphinx.ext.inheritance_diagram import InheritanceException, import_classes
from sphinx.ext.inheritance_diagram import (
    InheritanceDiagram, InheritanceException, import_classes
)


@pytest.mark.sphinx(buildername="html", testroot="inheritance")
@pytest.mark.usefixtures('if_graphviz_found')
def test_inheritance_diagram(app, status, warning):
    # monkey-patch InheritanceDiagram.run() so we can get access to its
    # results.
    orig_run = InheritanceDiagram.run
    graphs = {}

    def new_run(self):
        result = orig_run(self)
        node = result[0]
        source = os.path.basename(node.document.current_source).replace(".rst", "")
        graphs[source] = node['graph']
        return result

    InheritanceDiagram.run = new_run

    try:
        app.builder.build_all()
    finally:
        InheritanceDiagram.run = orig_run

    assert app.statuscode == 0

    html_warnings = warning.getvalue()
    assert html_warnings == ""

    # note: it is better to split these asserts into separate test functions
    # but I can't figure out how to build only a specific .rst file

    # basic inheritance diagram showing all classes
    for cls in graphs['basic_diagram'].class_info:
        # use in b/c traversing order is different sometimes
        assert cls in [
            ('dummy.test.A', 'dummy.test.A', [], None),
            ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
            ('dummy.test.C', 'dummy.test.C', ['dummy.test.A'], None),
            ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
            ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
            ('dummy.test.B', 'dummy.test.B', ['dummy.test.A'], None)
        ]

    # inheritance diagram using :parts: 1 option
    for cls in graphs['diagram_w_parts'].class_info:
        assert cls in [
            ('A', 'dummy.test.A', [], None),
            ('F', 'dummy.test.F', ['C'], None),
            ('C', 'dummy.test.C', ['A'], None),
            ('E', 'dummy.test.E', ['B'], None),
            ('D', 'dummy.test.D', ['B', 'C'], None),
            ('B', 'dummy.test.B', ['A'], None)
        ]

    # inheritance diagram with 1 top class
    # :top-classes: dummy.test.B
    # rendering should be
    #       A
    #        \
    #     B   C
    #    / \ / \
    #   E   D   F
    #
    for cls in graphs['diagram_w_1_top_class'].class_info:
        assert cls in [
            ('dummy.test.A', 'dummy.test.A', [], None),
            ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
            ('dummy.test.C', 'dummy.test.C', ['dummy.test.A'], None),
            ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
            ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
            ('dummy.test.B', 'dummy.test.B', [], None)
        ]

    # inheritance diagram with 2 top classes
    # :top-classes: dummy.test.B, dummy.test.C
    # Note: we're specifying separate classes, not the entire module here
    # rendering should be
    #
    #     B   C
    #    / \ / \
    #   E   D   F
    #
    for cls in graphs['diagram_w_2_top_classes'].class_info:
        assert cls in [
            ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
            ('dummy.test.C', 'dummy.test.C', [], None),
            ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
            ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
            ('dummy.test.B', 'dummy.test.B', [], None)
        ]

    # inheritance diagram with 2 top classes and specifying the entire module
    # rendering should be
    #
    #       A
    #     B   C
    #    / \ / \
    #   E   D   F
    #
    # Note: dummy.test.A is included in the graph before its descendants are even processed
    # b/c we've specified to load the entire module. The way InheritanceGraph works it is very
    # hard to exclude parent classes once they have been included in the graph.
    # If you'd like to not show class A in the graph don't specify the entire module.
    # this is a known issue.
    for cls in graphs['diagram_module_w_2_top_classes'].class_info:
        assert cls in [
            ('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
            ('dummy.test.C', 'dummy.test.C', [], None),
            ('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
            ('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
            ('dummy.test.B', 'dummy.test.B', [], None),
            ('dummy.test.A', 'dummy.test.A', [], None),
        ]


@pytest.mark.sphinx('html', testroot='ext-inheritance_diagram')
@ -23,7 +140,7 @@ def test_inheritance_diagram_png_html(app, status, warning):

    content = (app.outdir / 'index.html').text()

    pattern = ('<div class="figure align-center" id="id1">\n'
    pattern = ('<div class="figure align-default" id="id1">\n'
               '<div class="graphviz">'
               '<img src="_images/inheritance-\\w+.png" alt="Inheritance diagram of test.Foo" '
               'class="inheritance graphviz" /></div>\n<p class="caption">'
@ -40,7 +157,7 @@ def test_inheritance_diagram_svg_html(app, status, warning):

    content = (app.outdir / 'index.html').text()

    pattern = ('<div class="figure align-center" id="id1">\n'
    pattern = ('<div class="figure align-default" id="id1">\n'
               '<div class="graphviz">'
               '<object data="_images/inheritance-\\w+.svg" '
               'type="image/svg\\+xml" class="inheritance graphviz">\n'
@ -80,7 +197,7 @@ def test_inheritance_diagram_latex_alias(app, status, warning):

    content = (app.outdir / 'index.html').text()

    pattern = ('<div class="figure align-center" id="id1">\n'
    pattern = ('<div class="figure align-default" id="id1">\n'
               '<div class="graphviz">'
               '<img src="_images/inheritance-\\w+.png" alt="Inheritance diagram of test.Foo" '
               'class="inheritance graphviz" /></div>\n<p class="caption">'
@ -90,7 +207,7 @@ def test_inheritance_diagram_latex_alias(app, status, warning):


def test_import_classes(rootdir):
    from sphinx.application import Sphinx, TemplateBridge
    from sphinx.parsers import Parser, RSTParser
    from sphinx.util.i18n import CatalogInfo

    try:
@ -120,16 +237,16 @@ def test_import_classes(rootdir):
    assert classes == []

    # all classes in the module
    classes = import_classes('sphinx.application', None)
    assert set(classes) == {Sphinx, TemplateBridge}
    classes = import_classes('sphinx.parsers', None)
    assert set(classes) == {Parser, RSTParser}

    # specified class in the module
    classes = import_classes('sphinx.application.Sphinx', None)
    assert classes == [Sphinx]
    classes = import_classes('sphinx.parsers.Parser', None)
    assert classes == [Parser]

    # specified class in current module
    classes = import_classes('Sphinx', 'sphinx.application')
    assert classes == [Sphinx]
    classes = import_classes('Parser', 'sphinx.parsers')
    assert classes == [Parser]

    # relative module name to current module
    classes = import_classes('i18n.CatalogInfo', 'sphinx.util')
@ -473,12 +473,21 @@ Raises:
        A setting wasn't specified, or was invalid.
    ValueError:
        Something something value error.
    :py:class:`AttributeError`
        errors for missing attributes.
    ~InvalidDimensionsError
        If the dimensions couldn't be parsed.
    `InvalidArgumentsError`
        If the arguments are invalid.

""", """
Example Function

:raises RuntimeError: A setting wasn't specified, or was invalid.
:raises ValueError: Something something value error.
:raises AttributeError: errors for missing attributes.
:raises ~InvalidDimensionsError: If the dimensions couldn't be parsed.
:raises InvalidArgumentsError: If the arguments are invalid.
"""),
################################
("""
@ -314,6 +314,21 @@ def test_decorators():
                                  'Foo.method': ('def', 13, 15)}


def test_async_function_and_method():
    source = ('async def some_function():\n'
              '    """docstring"""\n'
              '    a = 1 + 1  #: comment1\n'
              '\n'
              'class Foo:\n'
              '    async def method(self):\n'
              '        pass\n')
    parser = Parser(source)
    parser.parse()
    assert parser.definitions == {'some_function': ('def', 1, 3),
                                  'Foo': ('class', 5, 7),
                                  'Foo.method': ('def', 6, 7)}


def test_formfeed_char():
    source = ('class Foo:\n'
              '\f\n'
@ -126,4 +126,4 @@ def test_theme_sidebars(app, status, warning):
    assert '<h3><a href="#">Table of Contents</a></h3>' in result
    assert '<h3>Related Topics</h3>' not in result
    assert '<h3>This Page</h3>' not in result
    assert '<h3>Quick search</h3>' in result
    assert '<h3 id="searchlabel">Quick search</h3>' in result
@ -7,8 +7,12 @@
    :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import _testcapi
import datetime
import functools
import sys
import types
from textwrap import dedent

import pytest
@ -383,19 +387,102 @@ def test_dict_customtype():
    assert "<CustomType(2)>: 2" in description


def test_isstaticmethod():
    class Foo():
        @staticmethod
        def method1():
@pytest.mark.sphinx(testroot='ext-autodoc')
def test_isstaticmethod(app):
    from target.methods import Base, Inherited

    assert inspect.isstaticmethod(Base.staticmeth, Base, 'staticmeth') is True
    assert inspect.isstaticmethod(Base.meth, Base, 'meth') is False
    assert inspect.isstaticmethod(Inherited.staticmeth, Inherited, 'staticmeth') is True
    assert inspect.isstaticmethod(Inherited.meth, Inherited, 'meth') is False


@pytest.mark.sphinx(testroot='ext-autodoc')
def test_iscoroutinefunction(app):
    from target.functions import coroutinefunc, func, partial_coroutinefunc
    from target.methods import Base

    assert inspect.iscoroutinefunction(func) is False  # function
    assert inspect.iscoroutinefunction(coroutinefunc) is True  # coroutine
    assert inspect.iscoroutinefunction(partial_coroutinefunc) is True  # partial-ed coroutine
    assert inspect.iscoroutinefunction(Base.meth) is False  # method
    assert inspect.iscoroutinefunction(Base.coroutinemeth) is True  # coroutine-method

    # partial-ed coroutine-method
    partial_coroutinemeth = Base.__dict__['partial_coroutinemeth']
    assert inspect.iscoroutinefunction(partial_coroutinemeth) is True


@pytest.mark.sphinx(testroot='ext-autodoc')
def test_isfunction(app):
    from target.functions import builtin_func, partial_builtin_func
    from target.functions import func, partial_func
    from target.methods import Base

    assert inspect.isfunction(func) is True  # function
    assert inspect.isfunction(partial_func) is True  # partial-ed function
    assert inspect.isfunction(Base.meth) is True  # method of class
    assert inspect.isfunction(Base.partialmeth) is True  # partial-ed method of class
    assert inspect.isfunction(Base().meth) is False  # method of instance
    assert inspect.isfunction(builtin_func) is False  # builtin function
    assert inspect.isfunction(partial_builtin_func) is False  # partial-ed builtin function


@pytest.mark.sphinx(testroot='ext-autodoc')
def test_isbuiltin(app):
    from target.functions import builtin_func, partial_builtin_func
    from target.functions import func, partial_func
    from target.methods import Base

    assert inspect.isbuiltin(builtin_func) is True  # builtin function
    assert inspect.isbuiltin(partial_builtin_func) is True  # partial-ed builtin function
    assert inspect.isbuiltin(func) is False  # function
    assert inspect.isbuiltin(partial_func) is False  # partial-ed function
    assert inspect.isbuiltin(Base.meth) is False  # method of class
    assert inspect.isbuiltin(Base().meth) is False  # method of instance


@pytest.mark.sphinx(testroot='ext-autodoc')
def test_isdescriptor(app):
    from target.functions import func
    from target.methods import Base

    assert inspect.isdescriptor(Base.prop) is True  # property of class
    assert inspect.isdescriptor(Base().prop) is False  # property of instance
    assert inspect.isdescriptor(Base.meth) is True  # method of class
    assert inspect.isdescriptor(Base().meth) is True  # method of instance
    assert inspect.isdescriptor(func) is True  # function


@pytest.mark.sphinx(testroot='ext-autodoc')
def test_isattributedescriptor(app):
    from target.methods import Base

    class Descriptor:
        def __get__(self, obj, typ=None):
            pass

        def method2(self):
            pass
    testinstancemethod = _testcapi.instancemethod(str.__repr__)

    class Bar(Foo):
        pass
    assert inspect.isattributedescriptor(Base.prop) is True  # property
    assert inspect.isattributedescriptor(Base.meth) is False  # method
    assert inspect.isattributedescriptor(Base.staticmeth) is False  # staticmethod
    assert inspect.isattributedescriptor(Base.classmeth) is False  # classmethod
    assert inspect.isattributedescriptor(Descriptor) is False  # custom descriptor class  # NOQA
    assert inspect.isattributedescriptor(str.join) is False  # MethodDescriptorType  # NOQA
    assert inspect.isattributedescriptor(object.__init__) is False  # WrapperDescriptorType  # NOQA
    assert inspect.isattributedescriptor(dict.__dict__['fromkeys']) is False  # ClassMethodDescriptorType  # NOQA
    assert inspect.isattributedescriptor(types.FrameType.f_locals) is True  # GetSetDescriptorType  # NOQA
    assert inspect.isattributedescriptor(datetime.timedelta.days) is True  # MemberDescriptorType  # NOQA
    assert inspect.isattributedescriptor(testinstancemethod) is False  # instancemethod (C-API)  # NOQA

    assert inspect.isstaticmethod(Foo.method1, Foo, 'method1') is True
    assert inspect.isstaticmethod(Foo.method2, Foo, 'method2') is False
    assert inspect.isstaticmethod(Bar.method1, Bar, 'method1') is True
    assert inspect.isstaticmethod(Bar.method2, Bar, 'method2') is False

def test_isproperty(app):
    from target.functions import func
    from target.methods import Base

    assert inspect.isproperty(Base.prop) is True  # property of class
    assert inspect.isproperty(Base().prop) is False  # property of instance
    assert inspect.isproperty(Base.meth) is False  # method of class
    assert inspect.isproperty(Base().meth) is False  # method of instance
    assert inspect.isproperty(func) is False  # function
@ -9,8 +9,11 @@
"""

from docutils.statemachine import StringList
from jinja2 import Environment

from sphinx.util.rst import append_epilog, escape, prepend_prolog
from sphinx.util.rst import (
    append_epilog, escape, heading, prepend_prolog, textwidth
)


def test_escape():
@ -83,3 +86,34 @@ def test_prepend_prolog_without_CR(app):
                ('<generated>', 0, ''),
                ('dummy.rst', 0, 'hello Sphinx world'),
                ('dummy.rst', 1, 'Sphinx is a document generator')]


def test_textwidth():
    assert textwidth('Hello') == 5
    assert textwidth('русский язык') == 12
    assert textwidth('русский язык', 'WFA') == 23  # Cyrillic chars are ambiguous-width


def test_heading():
    env = Environment()
    env.extend(language=None)

    assert heading(env, 'Hello') == ('Hello\n'
                                     '=====')
    assert heading(env, 'Hello', 1) == ('Hello\n'
                                        '=====')
    assert heading(env, 'Hello', 2) == ('Hello\n'
                                        '-----')
    assert heading(env, 'Hello', 3) == ('Hello\n'
                                        '~~~~~')
    assert heading(env, 'русский язык', 1) == (
        'русский язык\n'
        '============'
    )

    # language=ja: ambiguous
    env.language = 'ja'
    assert heading(env, 'русский язык', 1) == (
        'русский язык\n'
        '======================='
    )
37
tests/test_util_template.py
Normal file
37
tests/test_util_template.py
Normal file
@ -0,0 +1,37 @@
"""
    test_util_template
    ~~~~~~~~~~~~~~~~~~

    Tests sphinx.util.template functions.

    :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from sphinx.util.template import ReSTRenderer


def test_ReSTRenderer_escape():
    r = ReSTRenderer()
    template = '{{ "*hello*" | e }}'
    assert r.render_string(template, {}) == r'\*hello\*'


def test_ReSTRenderer_heading():
    r = ReSTRenderer()

    template = '{{ "hello" | heading }}'
    assert r.render_string(template, {}) == 'hello\n====='

    template = '{{ "hello" | heading(1) }}'
    assert r.render_string(template, {}) == 'hello\n====='

    template = '{{ "русский язык" | heading(2) }}'
    assert r.render_string(template, {}) == ('русский язык\n'
                                             '------------')

    # language: ja
    r.env.language = 'ja'
    template = '{{ "русский язык" | heading }}'
    assert r.render_string(template, {}) == ('русский язык\n'
                                             '=======================')
14
tox.ini
14
tox.ini
@ -15,7 +15,6 @@ deps =
    du14: docutils==0.14
extras =
    test
    websupport
setenv =
    PYTHONWARNINGS = all,ignore::ImportWarning:pkgutil,ignore::ImportWarning:importlib._bootstrap,ignore::ImportWarning:importlib._bootstrap_external,ignore::ImportWarning:pytest_cov.plugin,ignore::DeprecationWarning:site,ignore::DeprecationWarning:_pytest.assertion.rewrite,ignore::DeprecationWarning:_pytest.fixtures,ignore::DeprecationWarning:distutils
commands=
@ -62,11 +61,20 @@ commands=
basepython = python3
description =
    Build documentation.
deps =
    sphinxcontrib-websupport
extras =
    docs
commands =
    python setup.py build_sphinx {posargs}

[testenv:docslint]
basepython = python3
description =
    Lint documentation.
extras =
    docs
commands =
    python utils/doclinter.py CHANGES CONTRIBUTING.rst README.rst doc/

[testenv:bindep]
description =
    Install binary dependencies.
Some files were not shown because too many files have changed in this diff