Mirror of https://github.com/sphinx-doc/sphinx.git
Synced 2025-02-25 18:55:22 -06:00

Merge branch 'master' into pdf_bookmarksdepth
Commit 1a62d89276

CHANGES (44 changed lines)
@@ -30,6 +30,7 @@ Incompatible changes
 * html theme: Move a script tag for documentation_options.js in
   basic/layout.html to ``script_files`` variable
 * html theme: Move CSS tags in basic/layout.html to ``css_files`` variable
+* #8915: html theme: Emit a warning for sphinx_rtd_theme-0.2.4 or older
 * #8508: LaTeX: uplatex becomes a default setting of latex_engine for Japanese
   documents
 * #5977: py domain: ``:var:``, ``:cvar:`` and ``:ivar:`` fields do not create
@@ -38,6 +39,9 @@ Incompatible changes
   ``None`` by default instead of ``'default'``
 * #8769: LaTeX refactoring: split sphinx.sty into multiple files and rename
   some auxiliary files created in ``latex`` build output repertory
+* #8937: Use explicit title instead of <no title>
+* #8487: The :file: option for csv-table directive now recognizes an absolute
+  path as a relative path from source directory

 Deprecated
 ----------
@@ -47,6 +51,8 @@ Deprecated
 * ``sphinx.directives.patches.CSVTable``
 * ``sphinx.directives.patches.ListTable``
 * ``sphinx.directives.patches.RSTTable``
+* ``sphinx.registry.SphinxComponentRegistry.get_source_input()``
+* ``sphinx.registry.SphinxComponentRegistry.source_inputs``
 * ``sphinx.transforms.FigureAligner``
 * ``sphinx.util.pycompat.convert_with_2to3()``
 * ``sphinx.util.pycompat.execfile_()``
@@ -55,20 +61,34 @@ Deprecated
 Features added
 --------------

+* #8924: autodoc: Support ``bound`` argument for TypeVar
+* #7549: autosummary: Enable :confval:`autosummary_generate` by default
 * #4826: py domain: Add ``:canonical:`` option to python directives to describe
   the location where the object is defined
+* #7199: py domain: Add :confval:`python_use_unqualified_type_names` to suppress
+  the module name of the python reference if it can be resolved (experimental)
 * #7784: i18n: The alt text for image is translated by default (without
   :confval:`gettext_additional_targets` setting)
 * #2018: html: :confval:`html_favicon` and :confval:`html_logo` now accept URL
   for the image
 * #8070: html search: Support searching for 2characters word
+* #8938: imgconverter: Show the error of the command availability check
 * #7830: Add debug logs for change detection of sources and templates
 * #8201: Emit a warning if toctree contains duplicated entries
+* #8326: ``master_doc`` is now renamed to :confval:`root_doc`
+* #8942: C++, add support for the C++20 spaceship operator, ``<=>``.
+* #7199: A new node, ``sphinx.addnodes.pending_xref_condition`` has been added.
+  It can be used to choose appropriate content of the reference by conditions.

 Bugs fixed
 ----------

+* #8917: autodoc: Raises a warning if function has wrong __globals__ value
+* #8415: autodoc: a TypeVar imported from other module is not resolved (in
+  Python 3.7 or above)
+* #8905: html: html_add_permalinks=None and html_add_permalinks="" are ignored
 * #8380: html search: Paragraphs in search results are not identified as ``<p>``
+* #8915: html theme: The translation of sphinx_rtd_theme does not work
 * #8342: Emit a warning if a unknown domain is given for directive or role (ex.
   ``:unknown:doc:``)
 * #8711: LaTeX: backticks in code-blocks trigger latexpdf build warning (and font
@@ -76,11 +96,20 @@ Bugs fixed
 * #8253: LaTeX: Figures with no size defined get overscaled (compared to images
   with size explicitly set in pixels) (fixed for ``'pdflatex'/'lualatex'`` only)
 * #8881: LaTeX: The depth of bookmarks panel in PDF is not enough for navigation
+* #8925: LaTeX: 3.5.0 ``verbatimmaxunderfull`` setting does not work as
+  expected
+* #8911: C++: remove the longest matching prefix in
+  :confval:`cpp_index_common_prefix` instead of the first that matches.
+* C, properly reject function declarations when a keyword is used
+  as parameter name.
+* #8933: viewcode: Failed to create back-links on parallel build
+* #8960: C and C++, fix rendering of (member) function pointer types in
+  function parameter lists.

 Testing
 --------

-Release 3.5.2 (in development)
+Release 3.5.3 (in development)
 ==============================

 Dependencies
@@ -101,6 +130,16 @@ Bugs fixed
 Testing
 --------

+Release 3.5.2 (released Mar 06, 2021)
+=====================================
+
+Bugs fixed
+----------
+
+* #8943: i18n: Crashed by broken translation messages in ES, EL and HR
+* #8936: LaTeX: A custom LaTeX builder fails with unknown node error
+* #8952: Exceptions raised in a Directive cause parallel builds to hang
+
 Release 3.5.1 (released Feb 16, 2021)
 =====================================

@@ -164,6 +203,9 @@ Features added
 * #8775: autodoc: Support type union operator (PEP-604) in Python 3.10 or above
 * #8297: autodoc: Allow to extend :confval:`autodoc_default_options` via
   directive options
+* #759: autodoc: Add a new configuration :confval:`autodoc_preserve_defaults` as
+  an experimental feature. It preserves the default argument values of
+  functions in source code and keep them not evaluated for readability.
 * #8619: html: kbd role generates customizable HTML tags for compound keys
 * #8634: html: Allow to change the order of JS/CSS via ``priority`` parameter
   for :meth:`Sphinx.add_js_file()` and :meth:`Sphinx.add_css_file()`
@@ -15,6 +15,7 @@ include sphinx-quickstart.py
 include sphinx-apidoc.py
 include tox.ini
 include sphinx/locale/.tx/config
+include sphinx/py.typed

 recursive-include sphinx/templates *
 recursive-include sphinx/texinputs *
doc/_static/conf.py.txt (8 changed lines, vendored)

@@ -43,7 +43,7 @@ source_suffix = '.rst'
 # source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = 'index'
+root_doc = 'index'

 # General information about the project.
 project = u'test'
@@ -252,7 +252,7 @@ latex_elements = {
 #  (source start file, target name, title,
 #   author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'test.tex', u'test Documentation',
+    (root_doc, 'test.tex', u'test Documentation',
      u'test', 'manual'),
 ]

@@ -283,7 +283,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'test', u'test Documentation',
+    (root_doc, 'test', u'test Documentation',
      [author], 1)
 ]

@@ -298,7 +298,7 @@ man_pages = [
 #  (source start file, target name, title, author,
 #   dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'test', u'test Documentation',
+    (root_doc, 'test', u'test Documentation',
      author, 'test', 'One line description of project.',
      'Miscellaneous'),
 ]
@@ -9,7 +9,7 @@ extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
               'sphinx.ext.intersphinx',
               'sphinx.ext.viewcode', 'sphinx.ext.inheritance_diagram']

-master_doc = 'contents'
+root_doc = 'contents'
 templates_path = ['_templates']
 exclude_patterns = ['_build']

@@ -78,6 +78,7 @@ latex_show_urls = 'footnote'
 latex_use_xindy = True

 autodoc_member_order = 'groupwise'
+autosummary_generate = False
 todo_include_todos = True
 extlinks = {'duref': ('http://docutils.sourceforge.net/docs/ref/rst/'
                       'restructuredtext.html#%s', ''),
doc/development/tutorials/autodoc_ext.rst (new file, 142 lines)

@@ -0,0 +1,142 @@
.. _autodoc_ext_tutorial:

Developing autodoc extension for IntEnum
========================================

The objective of this tutorial is to create an extension that adds
support for a new type for autodoc. This autodoc extension will format
the ``IntEnum`` class from the Python standard library (module ``enum``).

Overview
--------

We want an extension that creates auto-documentation for ``IntEnum``.
``IntEnum`` is the integer enum class from the standard library ``enum`` module.

Currently this class has no special auto-documentation behavior.

We want to add the following to autodoc:

* A new ``autointenum`` directive that will document the ``IntEnum`` class.
* The generated documentation will list all possible enum values
  with their names.
* The ``autointenum`` directive will have an option ``:hex:`` which will
  cause the integers to be printed in hexadecimal form.


Prerequisites
-------------

We need the same setup as in :doc:`the previous extensions <todo>`. This time,
we will be putting our extension in a file called :file:`autodoc_intenum.py`.
The file :file:`my_enums.py` will contain the sample enums we will document.

Here is an example of the folder structure you might obtain:

.. code-block:: text

      └── source
          ├── _ext
          │   └── autodoc_intenum.py
          ├── conf.py
          ├── index.rst
          └── my_enums.py


Writing the extension
---------------------

Start with the ``setup`` function for the extension.

.. literalinclude:: examples/autodoc_intenum.py
   :language: python
   :linenos:
   :pyobject: setup


The :meth:`~Sphinx.setup_extension` method will pull in the autodoc extension
because our new extension depends on autodoc. :meth:`~Sphinx.add_autodocumenter`
is the method that registers our new auto-documenter class.

We want to import certain objects from the autodoc extension:

.. literalinclude:: examples/autodoc_intenum.py
   :language: python
   :linenos:
   :lines: 1-7


There are several different documenter classes, such as ``MethodDocumenter``
or ``AttributeDocumenter``, available in the autodoc extension, but
our new class is a subclass of ``ClassDocumenter``, the
documenter class used by autodoc to document classes.

This is the definition of our new auto-documenter class:

.. literalinclude:: examples/autodoc_intenum.py
   :language: python
   :linenos:
   :pyobject: IntEnumDocumenter


Important attributes of the new class:

**objtype**
    This attribute determines the ``auto`` directive name. In
    this case the auto directive will be ``autointenum``.

**directivetype**
    This attribute sets the generated directive name. In
    this example the generated directive will be ``.. py:class::``.

**priority**
    The larger the number, the higher the priority. We want our
    documenter to have a higher priority than its parent.

**option_spec**
    Option specifications. We copy the parent class options and
    add a new option, *hex*.


Overridden members:

**can_document_member**
    This member is important to override. It should
    return *True* when the passed object can be documented by this class.

**add_directive_header**
    This method generates the directive header. We add the
    **:final:** directive option. Remember to call **super** or no directive
    will be generated.

**add_content**
    This method generates the body of the class documentation.
    After calling the super method, we generate lines describing the enum values.


Using the extension
-------------------

You can now use the new autodoc directive to document any ``IntEnum``.

For example, suppose you have the following ``IntEnum``:

.. code-block:: python
   :caption: my_enums.py

   class Colors(IntEnum):
       """Colors enumerator"""
       NONE = 0
       RED = 1
       GREEN = 2
       BLUE = 3


This will be the documentation file with the auto-documentation directive:

.. code-block:: rst
   :caption: index.rst

   .. autointenum:: my_enums.Colors
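To try the tutorial extension locally, conf.py has to be able to import the
module from the ``_ext`` folder shown above. A minimal sketch (not part of
this commit), assuming that folder layout::

    # conf.py -- make _ext importable and enable the tutorial extension
    import os
    import sys

    sys.path.append(os.path.abspath('./_ext'))

    extensions = [
        'autodoc_intenum',
    ]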
doc/development/tutorials/examples/autodoc_intenum.py (new file, 52 lines)

@@ -0,0 +1,52 @@
from enum import IntEnum
from typing import Any, Optional

from docutils.statemachine import StringList

from sphinx.application import Sphinx
from sphinx.ext.autodoc import ClassDocumenter, bool_option


class IntEnumDocumenter(ClassDocumenter):
    objtype = 'intenum'
    directivetype = 'class'
    priority = 10 + ClassDocumenter.priority
    option_spec = dict(ClassDocumenter.option_spec)
    option_spec['hex'] = bool_option

    @classmethod
    def can_document_member(cls,
                            member: Any, membername: str,
                            isattr: bool, parent: Any) -> bool:
        return isinstance(member, IntEnum)

    def add_directive_header(self, sig: str) -> None:
        super().add_directive_header(sig)
        self.add_line('   :final:', self.get_sourcename())

    def add_content(self,
                    more_content: Optional[StringList],
                    no_docstring: bool = False
                    ) -> None:

        super().add_content(more_content, no_docstring)

        source_name = self.get_sourcename()
        enum_object: IntEnum = self.object
        use_hex = self.options.hex
        self.add_line('', source_name)

        for enum_value in enum_object:
            the_value_name = enum_value.name
            the_value_value = enum_value.value
            if use_hex:
                the_value_value = hex(the_value_value)

            self.add_line(
                f"**{the_value_name}**: {the_value_value}", source_name)
            self.add_line('', source_name)


def setup(app: Sphinx) -> None:
    app.setup_extension('sphinx.ext.autodoc')  # Require autodoc extension
    app.add_autodocumenter(IntEnumDocumenter)
@@ -13,3 +13,5 @@ Refer to the following tutorials to get started with extension development.
    helloworld
    todo
    recipe
+   autodoc_ext
+
@@ -32,11 +32,6 @@ The following is a list of deprecated interfaces.
      - TBD
      - ``logo_url``

-   * - ``sphinx.directives.patches.CSVTable``
-     - 4.0
-     - 6.0
-     - ``docutils.parsers.rst.directives.tables.CSVTable``
-
    * - ``sphinx.directives.patches.ListTable``
      - 4.0
      - 6.0
@@ -47,6 +42,16 @@ The following is a list of deprecated interfaces.
      - 6.0
      - ``docutils.parsers.rst.directives.tables.RSTTable``

+   * - ``sphinx.registry.SphinxComponentRegistry.get_source_input()``
+     - 4.0
+     - 6.0
+     - N/A
+
+   * - ``sphinx.registry.SphinxComponentRegistry.source_inputs``
+     - 4.0
+     - 6.0
+     - N/A
+
    * - ``sphinx.transforms.FigureAligner``
      - 4.0
      - 6.0
@@ -37,6 +37,7 @@ New inline nodes

 .. autoclass:: index
 .. autoclass:: pending_xref
+.. autoclass:: pending_xref_condition
 .. autoclass:: literal_emphasis
 .. autoclass:: download_reference

@@ -145,7 +145,7 @@ These options are used when :option:`--full` is specified:
 * ``module.rst_t``
 * ``package.rst_t``
 * ``toc.rst_t``
-* ``master_doc.rst_t``
+* ``root_doc.rst_t``
 * ``conf.py_t``
 * ``Makefile_t``
 * ``Makefile.new_t``
@@ -72,7 +72,7 @@ Options

 .. option:: --master=MASTER

-   Master document name. (see :confval:`master_doc`).
+   Master document name. (see :confval:`root_doc`).

 .. rubric:: Extension Options

@@ -149,7 +149,7 @@ Options
    sphinx project files generated by quickstart. Following Jinja2 template
    files are allowed:

-   * ``master_doc.rst_t``
+   * ``root_doc.rst_t``
    * ``conf.py_t``
    * ``Makefile_t``
    * ``Makefile.new_t``
@@ -325,7 +325,19 @@ in the future.
 .. data:: master_doc

-   The value of :confval:`master_doc`, for usage with :func:`pathto`.
+   Same as :data:`root_doc`.
+
+   .. versionchanged:: 4.0
+
+      Renamed to ``root_doc``.
+
+.. data:: root_doc
+
+   The value of :confval:`root_doc`, for usage with :func:`pathto`.
+
+   .. versionchanged:: 4.0
+
+      Renamed from ``master_doc``.

 .. data:: pagename

@@ -183,11 +183,20 @@ General configuration

 .. confval:: master_doc

-   The document name of the "master" document, that is, the document that
+   Same as :confval:`root_doc`.
+
+   .. versionchanged:: 4.0
+      Renamed ``master_doc`` to ``root_doc``.
+
+.. confval:: root_doc
+
+   The document name of the "root" document, that is, the document that
    contains the root :rst:dir:`toctree` directive. Default is ``'index'``.

    .. versionchanged:: 2.0
       The default is changed to ``'index'`` from ``'contents'``.
+   .. versionchanged:: 4.0
+      Renamed ``root_doc`` from ``master_doc``.

 .. confval:: exclude_patterns

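For illustration, a hypothetical conf.py using the new name (``'index'`` is
just the default value)::

    # conf.py
    root_doc = 'index'       # Sphinx 4.0 name for the top-level document
    # master_doc = 'index'   # pre-4.0 name, kept as an alias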
@@ -2023,8 +2032,8 @@ These options influence LaTeX output.
    *startdocname*
      String that specifies the :term:`document name` of the LaTeX file's master
      document. All documents referenced by the *startdoc* document in TOC trees
-     will be included in the LaTeX file. (If you want to use the default master
-     document for your LaTeX build, provide your :confval:`master_doc` here.)
+     will be included in the LaTeX file. (If you want to use the default root
+     document for your LaTeX build, provide your :confval:`root_doc` here.)

    *targetname*
      File name of the LaTeX file in the output directory.
@@ -2293,7 +2302,7 @@ These options influence manual page output.
      String that specifies the :term:`document name` of the manual page's master
      document. All documents referenced by the *startdoc* document in TOC trees
      will be included in the manual file. (If you want to use the default
-     master document for your manual pages build, use your :confval:`master_doc`
+     root document for your manual pages build, use your :confval:`root_doc`
      here.)

    *name*
@@ -2349,7 +2358,7 @@ These options influence Texinfo output.
      master document. All documents referenced by the *startdoc* document in
      TOC trees will be included in the Texinfo file. (If you want to use the
      default master document for your Texinfo build, provide your
-     :confval:`master_doc` here.)
+     :confval:`root_doc` here.)

    *targetname*
      File name (no extension) of the Texinfo file in the output directory.
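For illustration, a hypothetical latex_documents entry that uses the root
document as its *startdocname* (the title and author values are placeholders)::

    # conf.py
    root_doc = 'index'
    latex_documents = [
        (root_doc, 'project.tex', 'Project Documentation',
         'Author Name', 'manual'),
    ]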
@@ -2705,6 +2714,17 @@ Options for the C++ domain

    .. versionadded:: 1.5

+Options for the Python domain
+-----------------------------
+
+.. confval:: python_use_unqualified_type_names
+
+   If true, suppress the module name of the python reference if it can be
+   resolved. The default is ``False``.
+
+   .. versionadded:: 4.0
+
+   .. note:: This configuration is still experimental.
+
 Example of configuration file
 =============================

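For illustration, a hypothetical conf.py enabling the experimental option::

    # conf.py
    # With this set, a resolvable reference such as ``io.StringIO`` may be
    # rendered with the short name ``StringIO``.
    python_use_unqualified_type_names = True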
@@ -89,10 +89,15 @@ inserting them into the page source under a suitable :rst:dir:`py:module`,

       Boil the noodle *time* minutes.

-   **Options and advanced usage**
+   .. rubric:: Options

-   * If you want to automatically document members, there's a ``members``
-     option::
+   .. rst:directive:option:: members
+      :type: no value or comma separated list
+
+      If set, autodoc will generate document for the members of the target
+      module, class or exception.
+
+      For example::

         .. automodule:: noodle
            :members:
@@ -102,8 +107,11 @@ inserting them into the page source under a suitable :rst:dir:`py:module`,
         .. autoclass:: Noodle
            :members:

-     will document all non-private member functions and properties (that is,
-     those whose name doesn't start with ``_``).
+      will document all class member methods and properties.
+
+      By default, autodoc will not generate document for the members that are
+      private, not having docstrings, inherited from super class, or special
+      members.

      For modules, ``__all__`` will be respected when looking for members unless
      you give the ``ignore-module-all`` flag option. Without
@@ -116,6 +124,61 @@ inserting them into the page source under a suitable :rst:dir:`py:module`,
         .. autoclass:: Noodle
            :members: eat, slurp

+   .. rst:directive:option:: undoc-members
+      :type: no value
+
+      If set, autodoc will also generate document for the members not having
+      docstrings::
+
+        .. automodule:: noodle
+           :members:
+           :undoc-members:
+
+   .. rst:directive:option:: private-members
+      :type: no value or comma separated list
+
+      If set, autodoc will also generate document for the private members
+      (that is, those named like ``_private`` or ``__private``)::
+
+        .. automodule:: noodle
+           :members:
+           :private-members:
+
+      It can also take an explicit list of member names to be documented as
+      arguments::
+
+        .. automodule:: noodle
+           :members:
+           :private-members: _spicy, _garlickly
+
+      .. versionadded:: 1.1
+      .. versionchanged:: 3.2
+         The option can now take arguments.
+
+   .. rst:directive:option:: special-members
+      :type: no value or comma separated list
+
+      If set, autodoc will also generate document for the special members
+      (that is, those named like ``__special__``)::
+
+        .. autoclass:: my.Class
+           :members:
+           :special-members:
+
+      It can also take an explicit list of member names to be documented as
+      arguments::
+
+        .. autoclass:: my.Class
+           :members:
+           :special-members: __init__, __name__
+
+      .. versionadded:: 1.1
+
+      .. versionchanged:: 1.2
+         The option can now take arguments
+
+   **Options and advanced usage**
+
    * If you want to make the ``members`` option (or other options described
      below) the default, see :confval:`autodoc_default_options`.

@@ -139,31 +202,6 @@ inserting them into the page source under a suitable :rst:dir:`py:module`,
    .. versionchanged:: 3.5
       The default options can be overridden or extended temporarily.

-   * Members without docstrings will be left out, unless you give the
-     ``undoc-members`` flag option::
-
-        .. automodule:: noodle
-           :members:
-           :undoc-members:
-
-   * "Private" members (that is, those named like ``_private`` or ``__private``)
-     will be included if the ``private-members`` flag option is given::
-
-        .. automodule:: noodle
-           :members:
-           :private-members:
-
-     It can also take an explicit list of member names to be documented as
-     arguments::
-
-        .. automodule:: noodle
-           :members:
-           :private-members: _spicy, _garlickly
-
-     .. versionadded:: 1.1
-     .. versionchanged:: 3.2
-        The option can now take arguments.
-
    * autodoc considers a member private if its docstring contains
      ``:meta private:`` in its :ref:`info-field-lists`.
      For example:
@@ -203,21 +241,6 @@ inserting them into the page source under a suitable :rst:dir:`py:module`,

      .. versionadded:: 3.5

-   * Python "special" members (that is, those named like ``__special__``) will
-     be included if the ``special-members`` flag option is given::
-
-        .. autoclass:: my.Class
-           :members:
-           :private-members:
-           :special-members:
-
-     would document both "private" and "special" members of the class.
-
-     .. versionadded:: 1.1
-
-     .. versionchanged:: 1.2
-        The option can now take arguments, i.e. the special members to document.
-
    * For classes and exceptions, members inherited from base classes will be
      left out when documenting all members, unless you give the
      ``inherited-members`` option, in addition to ``members``::
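For illustration, a hypothetical conf.py applying the options documented above
to every autodoc directive via :confval:`autodoc_default_options` (the member
names are placeholders)::

    # conf.py
    autodoc_default_options = {
        'members': True,
        'undoc-members': True,
        'private-members': '_spicy, _garlickly',
        'special-members': '__init__',
    }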
@@ -586,6 +609,16 @@ There are also config values that you can set:
    .. __: https://mypy.readthedocs.io/en/latest/kinds_of_types.html#type-aliases
    .. versionadded:: 3.3

+.. confval:: autodoc_preserve_defaults
+
+   If True, the default argument values of functions will not be evaluated when
+   generating documents. It preserves them as is in the source code.
+
+   .. versionadded:: 4.0
+
+      Added as an experimental feature. This will be integrated into autodoc core
+      in the future.
+
 .. confval:: autodoc_warningiserror

    This value controls the behavior of :option:`sphinx-build -W` during
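For illustration, a hypothetical conf.py enabling the experimental feature::

    # conf.py
    # Keep default values such as ``sys.stdout`` or ``{}`` exactly as written
    # in the source, instead of showing their evaluated representation.
    autodoc_preserve_defaults = True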
@@ -19,11 +19,13 @@ The :mod:`sphinx.ext.autosummary` extension does this in two parts:
    that contain links to the documented items, and short summary blurbs
    extracted from their docstrings.

-2. Optionally, the convenience script :program:`sphinx-autogen` or the new
-   :confval:`autosummary_generate` config value can be used to generate short
-   "stub" files for the entries listed in the :rst:dir:`autosummary` directives.
-   These files by default contain only the corresponding
-   :mod:`sphinx.ext.autodoc` directive, but can be customized with templates.
+2. A :rst:dir:`autosummary` directive also generates short "stub" files for the
+   entries listed in its content. These files by default contain only the
+   corresponding :mod:`sphinx.ext.autodoc` directive, but can be customized with
+   templates.
+
+   The :program:`sphinx-autogen` script is also able to generate "stub" files
+   from command line.

 .. rst:directive:: autosummary

@@ -161,7 +163,7 @@ also use these config values:
 .. confval:: autosummary_generate

    Boolean indicating whether to scan all found documents for autosummary
-   directives, and to generate stub pages for each. It is disabled by default.
+   directives, and to generate stub pages for each. It is enabled by default.

    Can also be a list of documents for which stub pages should be generated.

@@ -173,6 +175,10 @@ also use these config values:
    Emits :event:`autodoc-skip-member` event as :mod:`~sphinx.ext.autodoc`
    does.

+   .. versionchanged:: 4.0
+
+      Enabled by default.
+
 .. confval:: autosummary_generate_overwrite

    If true, autosummary overwrites existing files by generated stub pages.
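For illustration, a hypothetical conf.py showing the new default and how to
opt out or restrict stub generation (the document name is a placeholder)::

    # conf.py
    extensions = ['sphinx.ext.autosummary']

    # Enabled by default since Sphinx 4.0; set False to disable, or pass a
    # list of documents that should be scanned for autosummary directives.
    autosummary_generate = True
    # autosummary_generate = ['api.rst']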
@@ -107,7 +107,30 @@ Anaconda
 Windows
 -------

-.. todo:: Could we start packaging this?
+Sphinx can be installed using `Chocolatey`__ or
+:ref:`installed manually <windows-other-method>`.
+
+__ https://chocolatey.org/
+
+Chocolatey
+~~~~~~~~~~
+
+::
+
+   $ choco install sphinx
+
+You would need to `install Chocolatey
+<https://chocolatey.org/install/>`_
+before running this.
+
+For more information, refer to the `chocolatey page`__.
+
+__ https://chocolatey.org/packages/sphinx/
+
+.. _windows-other-method:
+
+Other Methods
+~~~~~~~~~~~~~

 Most Windows users do not have Python installed by default, so we begin with
 the installation of Python itself. To check if you already have Python
@@ -197,9 +197,9 @@ tables of contents. The ``toctree`` directive is the central element.
    <metadata>` to let a document be built, but notify Sphinx that it is not
    reachable via a toctree.

-   The "master document" (selected by :confval:`master_doc`) is the "root" of
-   the TOC tree hierarchy. It can be used as the documentation's main page, or
-   as a "full table of contents" if you don't give a ``maxdepth`` option.
+   The "root document" (selected by :confval:`root_doc`) is the "root" of the TOC
+   tree hierarchy. It can be used as the documentation's main page, or as a
+   "full table of contents" if you don't give a ``maxdepth`` option.

    .. versionchanged:: 0.3
       Added "globbing" option.
@@ -338,6 +338,54 @@ class pending_xref(nodes.Inline, nodes.Element):
     """


+class pending_xref_condition(nodes.Inline, nodes.TextElement):
+    """Node for cross-references that are used to choose appropriate
+    content of the reference by conditions on the resolving phase.
+
+    When the :py:class:`pending_xref` node contains one or more
+    **pending_xref_condition** nodes, the cross-reference resolver
+    should choose the content of the reference using defined conditions
+    in ``condition`` attribute of each pending_xref_condition nodes::
+
+        <pending_xref refdomain="py" reftarget="io.StringIO ...>
+            <pending_xref_condition condition="resolved">
+                <literal>
+                    StringIO
+            <pending_xref_condition condition="*">
+                <literal>
+                    io.StringIO
+
+    After the cross-reference resolver has run, one of the content nodes
+    under the pending_xref_condition nodes is chosen by its condition and
+    all of the pending_xref_condition nodes are removed::
+
+        # When the cross-reference is resolved successfully
+        <reference>
+            <literal>
+                StringIO
+
+        # When resolution has failed
+        <reference>
+            <literal>
+                io.StringIO
+
+    .. note:: This node is only allowed to be placed under a pending_xref node.
+              It is not allowed to place it under other nodes. In addition,
+              a pending_xref node must contain only pending_xref_condition
+              nodes if it contains one or more pending_xref_condition nodes.
+
+    The pending_xref_condition node should have a **condition** attribute.
+    Domains can store their individual conditions in the attribute to
+    filter contents on the resolving phase. As a reserved condition name,
+    ``condition="*"`` is used for the fallback of resolution failure.
+    Additionally, as a recommended condition name, ``condition="resolved"``
+    is used for the representation of resolution success in the intersphinx
+    module.
+
+    .. versionadded:: 4.0
+    """
+
+
 class number_reference(nodes.reference):
     """Node for number references, similar to pending_xref."""

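As an aside (not part of this diff), the node structure shown in the docstring
above could be built programmatically roughly like this; ``io.StringIO`` is
just the example target::

    from docutils import nodes
    from sphinx import addnodes

    # A pending_xref whose display text depends on whether resolution succeeds.
    xref = addnodes.pending_xref('', refdomain='py', reftype='class',
                                 reftarget='io.StringIO')
    xref += addnodes.pending_xref_condition('', '', nodes.literal('', 'StringIO'),
                                            condition='resolved')
    xref += addnodes.pending_xref_condition('', '', nodes.literal('', 'io.StringIO'),
                                            condition='*')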
@@ -1102,7 +1102,7 @@ class Sphinx:
         If *override* is True, the given *cls* is forcedly installed even if
         a documenter having the same name is already installed.

-        .. todo:: Add real docs for Documenter and subclassing
+        See :ref:`autodoc_ext_tutorial`.

         .. versionadded:: 0.6
         .. versionchanged:: 2.2
@@ -413,9 +413,9 @@ class Builder:
         else:
             self._read_serial(docnames)

-        if self.config.master_doc not in self.env.all_docs:
-            raise SphinxError('master file %s not found' %
-                              self.env.doc2path(self.config.master_doc))
+        if self.config.root_doc not in self.env.all_docs:
+            raise SphinxError('root file %s not found' %
+                              self.env.doc2path(self.config.root_doc))

         for retval in self.events.emit('env-updated', self.env):
             if retval is not None:
@@ -517,7 +517,7 @@ class Builder:
             for tocdocname in self.env.files_to_rebuild.get(docname, set()):
                 if tocdocname in self.env.found_docs:
                     docnames.add(tocdocname)
-        docnames.add(self.config.master_doc)
+        docnames.add(self.config.root_doc)

         with progress_message(__('preparing documents')):
             self.prepare_writing(docnames)
@@ -222,14 +222,14 @@ class EpubBuilder(StandaloneHTMLBuilder):
                     appeared.add(node['refuri'])

     def get_toc(self) -> None:
-        """Get the total table of contents, containing the master_doc
+        """Get the total table of contents, containing the root_doc
         and pre and post files not managed by sphinx.
         """
-        doctree = self.env.get_and_resolve_doctree(self.config.master_doc,
+        doctree = self.env.get_and_resolve_doctree(self.config.root_doc,
                                                    self, prune_toctrees=False,
                                                    includehidden=True)
         self.refnodes = self.get_refnodes(doctree, [])
-        master_dir = path.dirname(self.config.master_doc)
+        master_dir = path.dirname(self.config.root_doc)
         if master_dir:
             master_dir += '/'  # XXX or os.sep?
             for item in self.refnodes:
@@ -237,13 +237,13 @@ class EpubBuilder(StandaloneHTMLBuilder):
         self.toc_add_files(self.refnodes)

     def toc_add_files(self, refnodes: List[Dict[str, Any]]) -> None:
-        """Add the master_doc, pre and post files to a list of refnodes.
+        """Add the root_doc, pre and post files to a list of refnodes.
         """
         refnodes.insert(0, {
             'level': 1,
-            'refuri': html.escape(self.config.master_doc + self.out_suffix),
+            'refuri': html.escape(self.config.root_doc + self.out_suffix),
             'text': ssp(html.escape(
-                self.env.titles[self.config.master_doc].astext()))
+                self.env.titles[self.config.root_doc].astext()))
         })
         for file, text in reversed(self.config.epub_pre_files):
             refnodes.insert(0, {
@@ -677,7 +677,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
         logger.info(__('writing toc.ncx file...'))

         if self.config.epub_tocscope == 'default':
-            doctree = self.env.get_and_resolve_doctree(self.config.master_doc,
+            doctree = self.env.get_and_resolve_doctree(self.config.root_doc,
                                                        self, prune_toctrees=False,
                                                        includehidden=False)
             refnodes = self.get_refnodes(doctree, [])
@@ -166,7 +166,7 @@ class Epub3Builder(_epub_base.EpubBuilder):

         if self.config.epub_tocscope == 'default':
             doctree = self.env.get_and_resolve_doctree(
-                self.config.master_doc, self,
+                self.config.root_doc, self,
                 prune_toctrees=False, includehidden=False)
             refnodes = self.get_refnodes(doctree, [])
             self.toc_add_files(refnodes)
@@ -494,7 +494,8 @@ class StandaloneHTMLBuilder(Builder):
             'version': self.config.version,
             'last_updated': self.last_updated,
             'copyright': self.config.copyright,
-            'master_doc': self.config.master_doc,
+            'master_doc': self.config.root_doc,
+            'root_doc': self.config.root_doc,
             'use_opensearch': self.config.html_use_opensearch,
             'docstitle': self.config.html_title,
             'shorttitle': self.config.html_short_title,
@@ -1247,18 +1248,31 @@ def validate_html_favicon(app: Sphinx, config: Config) -> None:
         config.html_favicon = None  # type: ignore


+class _stable_repr_object():
+
+    def __repr__(self):
+        return '<object>'
+
+
+UNSET = _stable_repr_object()
+
+
 def migrate_html_add_permalinks(app: Sphinx, config: Config) -> None:
     """Migrate html_add_permalinks to html_permalinks*."""
-    if config.html_add_permalinks:
-        # RemovedInSphinx60Warning
-        logger.warning(__('html_add_permalinks has been deprecated since v3.5.0. '
-                          'Please use html_permalinks and html_permalinks_icon instead.'))
-        if (isinstance(config.html_add_permalinks, bool) and
-                config.html_add_permalinks is False):
-            config.html_permalinks = False  # type: ignore
-        else:
-            config.html_permalinks_icon = html.escape(config.html_add_permalinks)  # type: ignore  # NOQA
+    html_add_permalinks = config.html_add_permalinks
+    if html_add_permalinks is UNSET:
+        return
+
+    # RemovedInSphinx60Warning
+    logger.warning(__('html_add_permalinks has been deprecated since v3.5.0. '
+                      'Please use html_permalinks and html_permalinks_icon instead.'))
+    if not html_add_permalinks:
+        config.html_permalinks = False  # type: ignore[attr-defined]
+        return
+
+    config.html_permalinks_icon = html.escape(  # type: ignore[attr-defined]
+        html_add_permalinks
+    )

 # for compatibility
 import sphinxcontrib.serializinghtml  # NOQA
@@ -1290,7 +1304,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
     app.add_config_value('html_sidebars', {}, 'html')
     app.add_config_value('html_additional_pages', {}, 'html')
     app.add_config_value('html_domain_indices', True, 'html', [list])
-    app.add_config_value('html_add_permalinks', None, 'html')
+    app.add_config_value('html_add_permalinks', UNSET, 'html')
     app.add_config_value('html_permalinks', True, 'html')
    app.add_config_value('html_permalinks_icon', '¶', 'html')
     app.add_config_value('html_use_index', True, 'html')
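For project authors, the practical effect of this migration (sketched here,
not part of the diff) is to replace the deprecated setting in conf.py::

    # conf.py
    # html_add_permalinks = '¶'    # deprecated since 3.5; now triggers a warning

    html_permalinks = True         # keep permalink anchors enabled
    html_permalinks_icon = '¶'     # icon formerly passed to html_add_permalinks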
@@ -517,7 +517,7 @@ def default_latex_documents(config: Config) -> List[Tuple[str, str, str, str, st
     """ Better default latex_documents settings. """
     project = texescape.escape(config.project, config.latex_engine)
     author = texescape.escape(config.author, config.latex_engine)
-    return [(config.master_doc,
+    return [(config.root_doc,
              make_filename_from_project(config.project) + '.tex',
              texescape.escape_abbr(project),
              texescape.escape_abbr(author),
@@ -42,7 +42,7 @@ class SubstitutionDefinitionsRemover(SphinxPostTransform):

     # should be invoked after Substitutions process
     default_priority = Substitutions.default_priority + 1
-    builders = ('latex',)
+    formats = ('latex',)

     def run(self, **kwargs: Any) -> None:
         for node in self.document.traverse(nodes.substitution_definition):
@@ -57,7 +57,7 @@ class ShowUrlsTransform(SphinxPostTransform):
     .. note:: This transform is used for integrated doctree
     """
     default_priority = 400
-    builders = ('latex',)
+    formats = ('latex',)

     # references are expanded to footnotes (or not)
     expanded = False
@@ -345,7 +345,7 @@ class LaTeXFootnoteTransform(SphinxPostTransform):
     """

     default_priority = 600
-    builders = ('latex',)
+    formats = ('latex',)

     def run(self, **kwargs: Any) -> None:
         footnotes = list(self.document.traverse(nodes.footnote))
@@ -497,7 +497,7 @@ class BibliographyTransform(SphinxPostTransform):
         ...
     """
     default_priority = 750
-    builders = ('latex',)
+    formats = ('latex',)

     def run(self, **kwargs: Any) -> None:
         citations = thebibliography()
@@ -516,7 +516,7 @@ class CitationReferenceTransform(SphinxPostTransform):
     pending_xref nodes to citation_reference.
     """
     default_priority = 5  # before ReferencesResolver
-    builders = ('latex',)
+    formats = ('latex',)

     def run(self, **kwargs: Any) -> None:
         domain = cast(CitationDomain, self.env.get_domain('citation'))
@@ -536,7 +536,7 @@ class MathReferenceTransform(SphinxPostTransform):
     nodes to math_reference.
     """
     default_priority = 5  # before ReferencesResolver
-    builders = ('latex',)
+    formats = ('latex',)

     def run(self, **kwargs: Any) -> None:
         equations = self.env.get_domain('math').data['objects']
@@ -551,7 +551,7 @@ class MathReferenceTransform(SphinxPostTransform):
 class LiteralBlockTransform(SphinxPostTransform):
     """Replace container nodes for literal_block by captioned_literal_block."""
     default_priority = 400
-    builders = ('latex',)
+    formats = ('latex',)

     def run(self, **kwargs: Any) -> None:
         matcher = NodeMatcher(nodes.container, literal_block=True)
@@ -563,7 +563,7 @@ class LiteralBlockTransform(SphinxPostTransform):
 class DocumentTargetTransform(SphinxPostTransform):
     """Add :doc label to the first section of each document."""
     default_priority = 400
-    builders = ('latex',)
+    formats = ('latex',)

     def run(self, **kwargs: Any) -> None:
         for node in self.document.traverse(addnodes.start_of_file):
@@ -599,7 +599,7 @@ class IndexInSectionTitleTransform(SphinxPostTransform):
         ...
     """
     default_priority = 400
-    builders = ('latex',)
+    formats = ('latex',)

     def run(self, **kwargs: Any) -> None:
         for node in self.document.traverse(nodes.title):
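The hunks above consistently rename the post-transform attribute ``builders``
to ``formats``. As a hypothetical illustration (not part of this diff), a
third-party post-transform limited to LaTeX output would now look like this::

    from typing import Any

    from docutils import nodes

    from sphinx.application import Sphinx
    from sphinx.transforms.post_transforms import SphinxPostTransform


    class StripCommentsForLaTeX(SphinxPostTransform):
        """Drop comment nodes before they reach the LaTeX writer."""
        default_priority = 400
        formats = ('latex',)   # formerly spelled ``builders = ('latex',)``

        def run(self, **kwargs: Any) -> None:
            for node in list(self.document.traverse(nodes.comment)):
                node.parent.remove(node)


    def setup(app: Sphinx) -> None:
        app.add_post_transform(StripCommentsForLaTeX)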
@@ -109,7 +109,7 @@ class ManualPageBuilder(Builder):
 def default_man_pages(config: Config) -> List[Tuple[str, str, str, List[str], int]]:
     """ Better default man_pages settings. """
     filename = make_filename_from_project(config.project)
-    return [(config.master_doc, filename, '%s %s' % (config.project, config.release),
+    return [(config.root_doc, filename, '%s %s' % (config.project, config.release),
             [config.author], 1)]


@@ -42,7 +42,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
     def get_target_uri(self, docname: str, typ: str = None) -> str:
         if docname in self.env.all_docs:
             # all references are on the same page...
-            return self.config.master_doc + self.out_suffix + \
+            return self.config.root_doc + self.out_suffix + \
                 '#document-' + docname
         else:
             # chances are this is a html_additional_page
@@ -54,7 +54,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):

     def fix_refuris(self, tree: Node) -> None:
         # fix refuris with double anchor
-        fname = self.config.master_doc + self.out_suffix
+        fname = self.config.root_doc + self.out_suffix
         for refnode in tree.traverse(nodes.reference):
             if 'refuri' not in refnode:
                 continue
@@ -75,7 +75,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
         return self.render_partial(toctree)['fragment']

     def assemble_doctree(self) -> nodes.document:
-        master = self.config.master_doc
+        master = self.config.root_doc
         tree = self.env.get_doctree(master)
         tree = inline_all_toctrees(self, set(), master, tree, darkgreen, [master])
         tree['docname'] = master
@@ -99,7 +99,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
                 alias = "%s/%s" % (docname, id)
                 new_secnumbers[alias] = secnum

-        return {self.config.master_doc: new_secnumbers}
+        return {self.config.root_doc: new_secnumbers}

     def assemble_toc_fignumbers(self) -> Dict[str, Dict[str, Dict[str, Tuple[int, ...]]]]:
         # Assemble toc_fignumbers to resolve figure numbers on SingleHTML.
@@ -120,11 +120,11 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
                 for id, fignum in fignums.items():
                     new_fignumbers[alias][id] = fignum

-        return {self.config.master_doc: new_fignumbers}
+        return {self.config.root_doc: new_fignumbers}

     def get_doc_context(self, docname: str, body: str, metatags: str) -> Dict:
         # no relation links...
-        toctree = TocTree(self.env).get_toctree_for(self.config.master_doc, self, False)
+        toctree = TocTree(self.env).get_toctree_for(self.config.root_doc, self, False)
         # if there is no toctree, toc is None
         if toctree:
             self.fix_refuris(toctree)
@@ -160,8 +160,8 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
             self.env.toc_fignumbers = self.assemble_toc_fignumbers()

         with progress_message(__('writing')):
-            self.write_doc_serialized(self.config.master_doc, doctree)
-            self.write_doc(self.config.master_doc, doctree)
+            self.write_doc_serialized(self.config.root_doc, doctree)
+            self.write_doc(self.config.root_doc, doctree)

     def finish(self) -> None:
         self.write_additional_files()

@@ -197,7 +197,7 @@ class TexinfoBuilder(Builder):
 def default_texinfo_documents(config: Config) -> List[Tuple[str, str, str, str, str, str, str]]:  # NOQA
     """ Better default texinfo_documents settings. """
     filename = make_filename_from_project(config.project)
-    return [(config.master_doc, filename, config.project, config.author, filename,
+    return [(config.root_doc, filename, config.project, config.author, filename,
             'One line description of project', 'Miscellaneous')]

@@ -162,10 +162,22 @@ class QuickstartRenderer(SphinxRenderer):
         self.templatedir = templatedir or ''
         super().__init__()

+    def _has_custom_template(self, template_name: str) -> bool:
+        """Check if custom template file exists.
+
+        Note: Please don't use this function from extensions.
+              It will be removed in the future without deprecation period.
+        """
+        template = path.join(self.templatedir, path.basename(template_name))
+        if self.templatedir and path.exists(template):
+            return True
+        else:
+            return False
+
     def render(self, template_name: str, context: Dict) -> str:
-        user_template = path.join(self.templatedir, path.basename(template_name))
-        if self.templatedir and path.exists(user_template):
-            return self.render_from_file(user_template, context)
+        if self._has_custom_template(template_name):
+            custom_template = path.join(self.templatedir, path.basename(template_name))
+            return self.render_from_file(custom_template, context)
         else:
             return super().render(template_name, context)

@@ -318,6 +330,7 @@ def generate(d: Dict, overwrite: bool = True, silent: bool = False, templatedir:
     if 'mastertocmaxdepth' not in d:
         d['mastertocmaxdepth'] = 2

+    d['root_doc'] = d['master']
     d['now'] = time.asctime()
     d['project_underline'] = column_width(d['project']) * '='
     d.setdefault('extensions', [])
@@ -362,7 +375,13 @@ def generate(d: Dict, overwrite: bool = True, silent: bool = False, templatedir:
     write_file(path.join(srcdir, 'conf.py'), template.render_string(conf_text, d))

     masterfile = path.join(srcdir, d['master'] + d['suffix'])
+    if template._has_custom_template('quickstart/master_doc.rst_t'):
+        msg = ('A custom template `master_doc.rst_t` found. It has been renamed to '
+               '`root_doc.rst_t`. Please rename it on your project too.')
+        print(colorize('red', msg))  # RemovedInSphinx60Warning
         write_file(masterfile, template.render('quickstart/master_doc.rst_t', d))
+    else:
+        write_file(masterfile, template.render('quickstart/root_doc.rst_t', d))

     if d.get('make_mode') is True:
         makefile_template = 'quickstart/Makefile.new_t'

@@ -105,6 +105,7 @@ class Config:
         'figure_language_filename': ('{root}.{language}{ext}', 'env', [str]),

         'master_doc': ('index', 'env', []),
+        'root_doc': (lambda config: config.master_doc, 'env', []),
         'source_suffix': ({'.rst': 'restructuredtext'}, 'env', Any),
         'source_encoding': ('utf-8-sig', 'env', []),
         'exclude_patterns': ([], 'env', []),
@@ -461,17 +462,17 @@ def check_primary_domain(app: "Sphinx", config: Config) -> None:
         config.primary_domain = None  # type: ignore


-def check_master_doc(app: "Sphinx", env: "BuildEnvironment", added: Set[str],
+def check_root_doc(app: "Sphinx", env: "BuildEnvironment", added: Set[str],
                    changed: Set[str], removed: Set[str]) -> Set[str]:
-    """Adjust master_doc to 'contents' to support an old project which does not have
+    """Adjust root_doc to 'contents' to support an old project which does not have
-    no master_doc setting.
+    no root_doc setting.
     """
-    if (app.config.master_doc == 'index' and
+    if (app.config.root_doc == 'index' and
             'index' not in app.project.docnames and
             'contents' in app.project.docnames):
-        logger.warning(__('Since v2.0, Sphinx uses "index" as master_doc by default. '
+        logger.warning(__('Since v2.0, Sphinx uses "index" as root_doc by default. '
-                          'Please add "master_doc = \'contents\'" to your conf.py.'))
+                          'Please add "root_doc = \'contents\'" to your conf.py.'))
-        app.config.master_doc = "contents"  # type: ignore
+        app.config.root_doc = "contents"  # type: ignore

     return changed

@@ -483,7 +484,7 @@ def setup(app: "Sphinx") -> Dict[str, Any]:
     app.connect('config-inited', correct_copyright_year, priority=800)
     app.connect('config-inited', check_confval_types, priority=800)
     app.connect('config-inited', check_primary_domain, priority=800)
-    app.connect('env-get-outdated', check_master_doc)
+    app.connect('env-get-outdated', check_root_doc)

     return {
         'version': 'builtin',

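The new root_doc value above defaults to the current master_doc, so existing projects keep building unchanged. A minimal conf.py sketch of the new spelling (the project and document names are only examples):

    # conf.py -- sketch; 'contents' stands in for a project whose top-level
    # document is not the default 'index'.
    project = 'Example Project'      # hypothetical
    root_doc = 'contents'            # new name introduced by this change
    # master_doc = 'contents'        # the old name keeps working as an alias
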
@@ -6,7 +6,9 @@
     :license: BSD, see LICENSE for details.
 """

+import os
 import warnings
+from os import path
 from typing import TYPE_CHECKING, Any, Dict, List, Tuple, cast

 from docutils import nodes
@@ -18,13 +20,19 @@ from sphinx import addnodes
 from sphinx.deprecation import RemovedInSphinx60Warning
 from sphinx.directives import optional_int
 from sphinx.domains.math import MathDomain
+from sphinx.locale import __
+from sphinx.util import logging
 from sphinx.util.docutils import SphinxDirective
 from sphinx.util.nodes import set_source_info
+from sphinx.util.osutil import SEP, os_path, relpath

 if TYPE_CHECKING:
     from sphinx.application import Sphinx


+logger = logging.getLogger(__name__)
+
+
 class Figure(images.Figure):
     """The figure directive which applies `:name:` option to the figure node
     instead of the image node.
@@ -87,22 +95,26 @@ class RSTTable(tables.RSTTable):


 class CSVTable(tables.CSVTable):
-    """The csv-table directive which sets source and line information to its caption.
+    """The csv-table directive which searches a CSV file from Sphinx project's source
+    directory when an absolute path is given via :file: option.
-    Only for docutils-0.13 or older version."""
+    """

     def run(self) -> List[Node]:
-        warnings.warn('RSTTable is deprecated.',
-                      RemovedInSphinx60Warning)
+        if 'file' in self.options and self.options['file'].startswith((SEP, os.sep)):
+            env = self.state.document.settings.env
+            filename = self.options['file']
+            if path.exists(filename):
+                logger.warning(__('":file:" option for csv-table directive now recognizes '
+                                  'an absolute path as a relative path from source directory. '
+                                  'Please update your document.'),
+                               location=(env.docname, self.lineno))
+            else:
+                abspath = path.join(env.srcdir, os_path(self.options['file'][1:]))
+                docdir = path.dirname(env.doc2path(env.docname))
+                self.options['file'] = relpath(abspath, docdir)
+
         return super().run()

-    def make_title(self) -> Tuple[nodes.title, List[system_message]]:
-        title, message = super().make_title()
-        if title:
-            set_source_info(self, title)
-
-        return title, message
-

 class ListTable(tables.ListTable):
     """The list-table directive which sets source and line information to its caption.
@@ -110,7 +122,7 @@ class ListTable(tables.ListTable):
     Only for docutils-0.13 or older version."""

     def run(self) -> List[Node]:
-        warnings.warn('RSTTable is deprecated.',
+        warnings.warn('ListTable is deprecated.',
                       RemovedInSphinx60Warning)
         return super().run()

@@ -224,6 +236,7 @@ class MathDirective(SphinxDirective):
 def setup(app: "Sphinx") -> Dict[str, Any]:
     directives.register_directive('figure', Figure)
     directives.register_directive('meta', Meta)
+    directives.register_directive('csv-table', CSVTable)
     directives.register_directive('code', Code)
     directives.register_directive('math', MathDirective)

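The path handling in CSVTable.run() above can be read on its own: an absolute :file: value is reinterpreted as relative to the source directory and then rewritten relative to the current document. A standalone sketch of that translation, with made-up directory names:

    from os import path

    srcdir = '/home/user/project/docs'            # hypothetical Sphinx srcdir
    docdir = '/home/user/project/docs/chapter1'   # directory of the current document
    option = '/tables/data.csv'                   # absolute value given to :file:

    # Same steps as the directive: drop the leading separator, anchor the path
    # at srcdir, then express it relative to the document's directory.
    abspath = path.join(srcdir, option[1:])
    print(path.relpath(abspath, docdir))          # -> '../tables/data.csv'
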
@@ -387,19 +387,6 @@ class ASTPostfixDec(ASTPostfixOp):
         signode.append(nodes.Text('--'))


-class ASTPostfixMember(ASTPostfixOp):
-    def __init__(self, name):
-        self.name = name
-
-    def _stringify(self, transform: StringifyTransform) -> str:
-        return '.' + transform(self.name)
-
-    def describe_signature(self, signode: TextElement, mode: str,
-                           env: "BuildEnvironment", symbol: "Symbol") -> None:
-        signode.append(nodes.Text('.'))
-        self.name.describe_signature(signode, 'noneIsName', env, symbol)
-
-
 class ASTPostfixMemberOfPointer(ASTPostfixOp):
     def __init__(self, name):
         self.name = name
@@ -682,15 +669,24 @@ class ASTParameters(ASTBase):
     def describe_signature(self, signode: TextElement, mode: str,
                            env: "BuildEnvironment", symbol: "Symbol") -> None:
         verify_description_mode(mode)
+        # only use the desc_parameterlist for the outer list, not for inner lists
+        if mode == 'lastIsName':
            paramlist = addnodes.desc_parameterlist()
            for arg in self.args:
                param = addnodes.desc_parameter('', '', noemph=True)
-               if mode == 'lastIsName':  # i.e., outer-function params
                arg.describe_signature(param, 'param', env, symbol=symbol)
-               else:
-                   arg.describe_signature(param, 'markType', env, symbol=symbol)
                paramlist += param
            signode += paramlist
+        else:
+            signode += nodes.Text('(', '(')
+            first = True
+            for arg in self.args:
+                if not first:
+                    signode += nodes.Text(', ', ', ')
+                first = False
+                arg.describe_signature(signode, 'markType', env, symbol=symbol)
+            signode += nodes.Text(')', ')')

         for attr in self.attrs:
             signode += nodes.Text(' ')
             attr.describe_signature(signode)
@@ -2256,7 +2252,7 @@ class DefinitionParser(BaseParser):
         # | postfix "[" expression "]"
         # | postfix "[" braced-init-list [opt] "]"
         # | postfix "(" expression-list [opt] ")"
-        # | postfix "." id-expression
+        # | postfix "." id-expression  // taken care of in primary by nested name
         # | postfix "->" id-expression
         # | postfix "++"
         # | postfix "--"
@@ -2274,17 +2270,6 @@ class DefinitionParser(BaseParser):
                     self.fail("Expected ']' in end of postfix expression.")
                 postFixes.append(ASTPostfixArray(expr))
                 continue
-            if self.skip_string('.'):
-                if self.skip_string('*'):
-                    # don't steal the dot
-                    self.pos -= 2
-                elif self.skip_string('..'):
-                    # don't steal the dot
-                    self.pos -= 3
-                else:
-                    name = self._parse_nested_name()
-                    postFixes.append(ASTPostfixMember(name))
-                    continue
             if self.skip_string('->'):
                 if self.skip_string('*'):
                     # don't steal the arrow
@@ -2693,16 +2678,13 @@ class DefinitionParser(BaseParser):
     def _parse_declarator_name_suffix(
             self, named: Union[bool, str], paramMode: str, typed: bool
     ) -> ASTDeclarator:
+        assert named in (True, False, 'single')
         # now we should parse the name, and then suffixes
-        if named == 'maybe':
+        if named == 'single':
-            pos = self.pos
-            try:
-                declId = self._parse_nested_name()
-            except DefinitionError:
-                self.pos = pos
-                declId = None
-        elif named == 'single':
             if self.match(identifier_re):
+                if self.matched_text in _keywords:
+                    self.fail("Expected identifier, "
+                              "got keyword: %s" % self.matched_text)
                 identifier = ASTIdentifier(self.matched_text)
                 declId = ASTNestedName([identifier], rooted=False)
             else:
@@ -2880,8 +2862,8 @@ class DefinitionParser(BaseParser):

     def _parse_type(self, named: Union[bool, str], outer: str = None) -> ASTType:
         """
-        named=False|'maybe'|True: 'maybe' is e.g., for function objects which
+        named=False|'single'|True: 'single' is e.g., for function objects which
-        doesn't need to name the arguments
+        doesn't need to name the arguments, but otherwise is a single name
         """
         if outer:  # always named
             if outer not in ('type', 'member', 'function'):

@@ -306,6 +306,7 @@ _operator_re = re.compile(r'''(?x)
         |   \+\+ | --
         |   ->\*? | \,
         |   (<<|>>)=? | && | \|\|
+        |   <=>
         |   [!<>=/*%+|&^~-]=?
         |   (\b(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|xor|xor_eq)\b)
     ''')
@@ -494,6 +495,7 @@ _id_operator_v2 = {
     '>': 'gt',
     '<=': 'le',
     '>=': 'ge',
+    '<=>': 'ss',
     '!': 'nt', 'not': 'nt',
     '&&': 'aa', 'and': 'aa',
     '||': 'oo', 'or': 'oo',
@@ -528,7 +530,7 @@ _expression_bin_ops = [
     ['^', 'xor'],
     ['&', 'bitand'],
     ['==', '!=', 'not_eq'],
-    ['<=', '>=', '<', '>'],
+    ['<=>', '<=', '>=', '<', '>'],
     ['<<', '>>'],
     ['+', '-'],
     ['*', '/', '%'],
@@ -1965,15 +1967,23 @@ class ASTParametersQualifiers(ASTBase):
     def describe_signature(self, signode: TextElement, mode: str,
                            env: "BuildEnvironment", symbol: "Symbol") -> None:
         verify_description_mode(mode)
+        # only use the desc_parameterlist for the outer list, not for inner lists
+        if mode == 'lastIsName':
            paramlist = addnodes.desc_parameterlist()
            for arg in self.args:
                param = addnodes.desc_parameter('', '', noemph=True)
-               if mode == 'lastIsName':  # i.e., outer-function params
                arg.describe_signature(param, 'param', env, symbol=symbol)
-               else:
-                   arg.describe_signature(param, 'markType', env, symbol=symbol)
                paramlist += param
            signode += paramlist
+        else:
+            signode += nodes.Text('(', '(')
+            first = True
+            for arg in self.args:
+                if not first:
+                    signode += nodes.Text(', ', ', ')
+                first = False
+                arg.describe_signature(signode, 'markType', env, symbol=symbol)
+            signode += nodes.Text(')', ')')

         def _add_anno(signode: TextElement, text: str) -> None:
             signode += nodes.Text(' ')
@@ -5309,7 +5319,7 @@ class DefinitionParser(BaseParser):
         # exclusive-or = and ^
         # and = equality &
         # equality = relational ==, !=
-        # relational = shift <, >, <=, >=
+        # relational = shift <, >, <=, >=, <=>
         # shift = additive <<, >>
         # additive = multiplicative +, -
         # multiplicative = pm *, /, %
@@ -7644,10 +7654,11 @@ def setup(app: Sphinx) -> Dict[str, Any]:
     app.add_config_value("cpp_debug_lookup", False, '')
     app.add_config_value("cpp_debug_show_tree", False, '')

-    def setDebugFlags(app):
+    def initStuff(app):
         Symbol.debug_lookup = app.config.cpp_debug_lookup
         Symbol.debug_show_tree = app.config.cpp_debug_show_tree
-    app.connect("builder-inited", setDebugFlags)
+        app.config.cpp_index_common_prefix.sort(reverse=True)
+    app.connect("builder-inited", initStuff)

     return {
         'version': 'builtin',

@@ -22,7 +22,7 @@ from docutils.nodes import Element, Node
 from docutils.parsers.rst import directives

 from sphinx import addnodes
-from sphinx.addnodes import desc_signature, pending_xref
+from sphinx.addnodes import desc_signature, pending_xref, pending_xref_condition
 from sphinx.application import Sphinx
 from sphinx.builders import Builder
 from sphinx.deprecation import RemovedInSphinx50Warning
@@ -37,7 +37,7 @@ from sphinx.util import logging
 from sphinx.util.docfields import Field, GroupedField, TypedField
 from sphinx.util.docutils import SphinxDirective
 from sphinx.util.inspect import signature_from_str
-from sphinx.util.nodes import make_id, make_refnode
+from sphinx.util.nodes import find_pending_xref_condition, make_id, make_refnode
 from sphinx.util.typing import TextlikeNode

 logger = logging.getLogger(__name__)
@@ -92,7 +92,17 @@ def type_to_xref(text: str, env: BuildEnvironment = None) -> addnodes.pending_xr
     else:
         kwargs = {}

-    return pending_xref('', nodes.Text(text),
+    if env.config.python_use_unqualified_type_names:
+        # Note: It would be better to use qualname to describe the object to support
+        #       nested classes. But python domain can't access the real python object because this
+        #       module should work not-dynamically.
+        shortname = text.split('.')[-1]
+        contnodes = [pending_xref_condition('', shortname, condition='resolved'),
+                     pending_xref_condition('', text, condition='*')]  # type: List[Node]
+    else:
+        contnodes = [nodes.Text(text)]
+
+    return pending_xref('', *contnodes,
                         refdomain='py', reftype=reftype, reftarget=text, **kwargs)


@@ -1209,7 +1219,15 @@ class PythonDomain(Domain):
             if obj[2] == 'module':
                 return self._make_module_refnode(builder, fromdocname, name, contnode)
             else:
-                return make_refnode(builder, fromdocname, obj[0], obj[1], contnode, name)
+                # determine the content of the reference by conditions
+                content = find_pending_xref_condition(node, 'resolved')
+                if content:
+                    children = content.children
+                else:
+                    # if not found, use contnode
+                    children = [contnode]
+
+                return make_refnode(builder, fromdocname, obj[0], obj[1], children, name)

     def resolve_any_xref(self, env: BuildEnvironment, fromdocname: str, builder: Builder,
                          target: str, node: pending_xref, contnode: Element
@@ -1226,9 +1244,17 @@ class PythonDomain(Domain):
                                 self._make_module_refnode(builder, fromdocname,
                                                           name, contnode)))
             else:
+                # determine the content of the reference by conditions
+                content = find_pending_xref_condition(node, 'resolved')
+                if content:
+                    children = content.children
+                else:
+                    # if not found, use contnode
+                    children = [contnode]
+
                 results.append(('py:' + self.role_for_objtype(obj[2]),
                                 make_refnode(builder, fromdocname, obj[0], obj[1],
-                                             contnode, name)))
+                                             children, name)))
         return results

     def _make_module_refnode(self, builder: Builder, fromdocname: str, name: str,
@@ -1295,6 +1321,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
     app.setup_extension('sphinx.directives')

     app.add_domain(PythonDomain)
+    app.add_config_value('python_use_unqualified_type_names', False, 'env')
     app.connect('object-description-transform', filter_meta_fields)
     app.connect('missing-reference', builtin_resolver, priority=900)

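A conf.py sketch for the option registered above; it is experimental and off by default, and the dotted name in the comment is only an example:

    # conf.py -- sketch
    extensions = ['sphinx.ext.autodoc']
    python_use_unqualified_type_names = True
    # When a reference such as 'mypackage.mymodule.MyClass' can be resolved,
    # it is rendered as just 'MyClass'; unresolved targets keep the full name.
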
@@ -600,7 +600,7 @@ class BuildEnvironment:
                     traversed.add(subdocname)

         relations = {}
-        docnames = traverse_toctree(None, self.config.master_doc)
+        docnames = traverse_toctree(None, self.config.root_doc)
         prevdoc = None
         parent, docname = next(docnames)
         for nextparent, nextdoc in docnames:
@@ -618,7 +618,7 @@ class BuildEnvironment:
         included = set().union(*self.included.values())  # type: ignore
         for docname in sorted(self.all_docs):
             if docname not in self.files_to_rebuild:
-                if docname == self.config.master_doc:
+                if docname == self.config.root_doc:
                     # the master file is not included anywhere ;)
                     continue
                 if docname in included:

@@ -315,7 +315,7 @@ class TocTree:
     def get_toctree_for(self, docname: str, builder: "Builder", collapse: bool,
                         **kwargs: Any) -> Element:
         """Return the global TOC nodetree."""
-        doctree = self.env.get_doctree(self.env.config.master_doc)
+        doctree = self.env.get_doctree(self.env.config.root_doc)
         toctrees = []  # type: List[Element]
         if 'includehidden' not in kwargs:
             kwargs['includehidden'] = True

@@ -50,7 +50,7 @@ class TitleCollector(EnvironmentCollector):
                 break
         else:
             # document has no title
-            titlenode += nodes.Text('<no title>')
+            titlenode += nodes.Text(doctree.get('title', '<no title>'))
         app.env.titles[app.env.docname] = titlenode
         app.env.longtitles[app.env.docname] = longtitlenode

@@ -281,7 +281,7 @@ class TocTreeCollector(EnvironmentCollector):
             _walk_doctree(docname, doctree, secnum)

         if env.config.numfig:
-            _walk_doc(env.config.master_doc, tuple())
+            _walk_doc(env.config.root_doc, tuple())
             for docname, fignums in env.toc_fignumbers.items():
                 if fignums != old_fignumbers.get(docname):
                     rewrite_needed.append(docname)

@@ -1812,6 +1812,8 @@ class TypeVarMixin(DataDocumenterMixinBase):
             attrs = [repr(self.object.__name__)]
             for constraint in self.object.__constraints__:
                 attrs.append(stringify_typehint(constraint))
+            if self.object.__bound__:
+                attrs.append(r"bound=\ " + restify(self.object.__bound__))
             if self.object.__covariant__:
                 attrs.append("covariant=True")
             if self.object.__contravariant__:
@@ -2632,6 +2634,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:

     app.connect('config-inited', migrate_autodoc_member_order, priority=800)

+    app.setup_extension('sphinx.ext.autodoc.preserve_defaults')
     app.setup_extension('sphinx.ext.autodoc.type_comment')
     app.setup_extension('sphinx.ext.autodoc.typehints')

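A small module that would exercise the new bound handling, assuming autodoc documents it; all names here are hypothetical:

    # example_types.py
    from typing import TypeVar

    class Connection:
        """Base class for connections."""

    # Documented e.g. with '.. autodata:: example_types.T'; with this change
    # autodoc includes the bound, roughly as: T = TypeVar('T', bound=Connection)
    T = TypeVar('T', bound=Connection)
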
sphinx/ext/autodoc/preserve_defaults.py (new file, 88 lines)
@@ -0,0 +1,88 @@
+"""
+    sphinx.ext.autodoc.preserve_defaults
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Preserve the default argument values of function signatures in source code
+    and keep them not evaluated for readability.
+
+    :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import ast
+import inspect
+from typing import Any, Dict
+
+from sphinx.application import Sphinx
+from sphinx.locale import __
+from sphinx.pycode.ast import parse as ast_parse
+from sphinx.pycode.ast import unparse as ast_unparse
+from sphinx.util import logging
+
+logger = logging.getLogger(__name__)
+
+
+class DefaultValue:
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def __repr__(self) -> str:
+        return self.name
+
+
+def get_function_def(obj: Any) -> ast.FunctionDef:
+    """Get FunctionDef object from living object.
+    This tries to parse original code for living object and returns
+    AST node for given *obj*.
+    """
+    try:
+        source = inspect.getsource(obj)
+        if source.startswith((' ', r'\t')):
+            # subject is placed inside class or block. To read its docstring,
+            # this adds if-block before the declaration.
+            module = ast_parse('if True:\n' + source)
+            return module.body[0].body[0]  # type: ignore
+        else:
+            module = ast_parse(source)
+            return module.body[0]  # type: ignore
+    except (OSError, TypeError):  # failed to load source code
+        return None
+
+
+def update_defvalue(app: Sphinx, obj: Any, bound_method: bool) -> None:
+    """Update defvalue info of *obj* using type_comments."""
+    if not app.config.autodoc_preserve_defaults:
+        return
+
+    try:
+        function = get_function_def(obj)
+        if function.args.defaults or function.args.kw_defaults:
+            sig = inspect.signature(obj)
+            defaults = list(function.args.defaults)
+            kw_defaults = list(function.args.kw_defaults)
+            parameters = list(sig.parameters.values())
+            for i, param in enumerate(parameters):
+                if param.default is not param.empty:
+                    if param.kind in (param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD):
+                        value = DefaultValue(ast_unparse(defaults.pop(0)))  # type: ignore
+                        parameters[i] = param.replace(default=value)
+                    else:
+                        value = DefaultValue(ast_unparse(kw_defaults.pop(0)))  # type: ignore
+                        parameters[i] = param.replace(default=value)
+            sig = sig.replace(parameters=parameters)
+            obj.__signature__ = sig
+    except (AttributeError, TypeError):
+        # failed to update signature (ex. built-in or extension types)
+        pass
+    except NotImplementedError as exc:  # failed to ast.unparse()
+        logger.warning(__("Failed to parse a default argument value for %r: %s"), obj, exc)
+
+
+def setup(app: Sphinx) -> Dict[str, Any]:
+    app.add_config_value('autodoc_preserve_defaults', False, True)
+    app.connect('autodoc-before-process-signature', update_defvalue)
+
+    return {
+        'version': '1.0',
+        'parallel_read_safe': True
+    }

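Using the new extension is a one-line opt-in; a sketch with an invented function to show the effect:

    # conf.py -- sketch
    extensions = ['sphinx.ext.autodoc']
    autodoc_preserve_defaults = True

    # mymodule.py (hypothetical)
    DEFAULT_TIMEOUT = 30.0

    def run(timeout=DEFAULT_TIMEOUT):
        """With the option enabled, autodoc renders the signature as
        run(timeout=DEFAULT_TIMEOUT) instead of run(timeout=30.0)."""
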
@@ -772,7 +772,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
     app.connect('builder-inited', process_generate_options)
     app.add_config_value('autosummary_context', {}, True)
     app.add_config_value('autosummary_filename_map', {}, 'html')
-    app.add_config_value('autosummary_generate', [], True, [bool])
+    app.add_config_value('autosummary_generate', True, True, [bool])
     app.add_config_value('autosummary_generate_overwrite', True, False)
     app.add_config_value('autosummary_mock_imports',
                          lambda config: config.autodoc_mock_imports, 'env')

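Since autosummary_generate now defaults to True, stub pages are generated automatically whenever the extension is loaded; a conf.py sketch for projects that want the previous behaviour back (file name is only an example):

    # conf.py -- sketch
    extensions = ['sphinx.ext.autosummary']
    autosummary_generate = False          # restore the old 'generate nothing' default
    # autosummary_generate = ['api.rst']  # an explicit list of source files still works
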
@@ -37,10 +37,10 @@ class ImagemagickConverter(ImageConverter):
             logger.debug('Invoking %r ...', args)
             subprocess.run(args, stdout=PIPE, stderr=PIPE, check=True)
             return True
-        except OSError:
+        except OSError as exc:
             logger.warning(__('convert command %r cannot be run, '
-                              'check the image_converter setting'),
-                           self.config.image_converter)
+                              'check the image_converter setting: %s'),
+                           self.config.image_converter, exc)
             return False
         except CalledProcessError as exc:
             logger.warning(__('convert exited with error:\n'

@@ -33,10 +33,11 @@ from typing import IO, Any, Dict, List, Tuple
 from urllib.parse import urlsplit, urlunsplit

 from docutils import nodes
-from docutils.nodes import Element, TextElement
+from docutils.nodes import TextElement
 from docutils.utils import relative_path

 import sphinx
+from sphinx.addnodes import pending_xref
 from sphinx.application import Sphinx
 from sphinx.builders.html import INVENTORY_FILENAME
 from sphinx.config import Config
@@ -44,6 +45,7 @@ from sphinx.environment import BuildEnvironment
 from sphinx.locale import _, __
 from sphinx.util import logging, requests
 from sphinx.util.inventory import InventoryFile
+from sphinx.util.nodes import find_pending_xref_condition
 from sphinx.util.typing import Inventory

 logger = logging.getLogger(__name__)
@@ -257,8 +259,8 @@ def load_mappings(app: Sphinx) -> None:
         inventories.main_inventory.setdefault(type, {}).update(objects)


-def missing_reference(app: Sphinx, env: BuildEnvironment, node: Element, contnode: TextElement
-                      ) -> nodes.reference:
+def missing_reference(app: Sphinx, env: BuildEnvironment, node: pending_xref,
+                      contnode: TextElement) -> nodes.reference:
     """Attempt to resolve a missing reference via intersphinx references."""
     target = node['reftarget']
     inventories = InventoryAdapter(env)
@@ -284,6 +286,17 @@ def missing_reference(app: Sphinx, env: BuildEnvironment, node: Element, contnod
         if 'py:attribute' in objtypes:
             # Since Sphinx-2.1, properties are stored as py:method
             objtypes.append('py:method')
+
+    # determine the contnode by pending_xref_condition
+    content = find_pending_xref_condition(node, 'resolved')
+    if content:
+        # resolved condition found.
+        contnodes = content.children
+        contnode = content.children[0]  # type: ignore
+    else:
+        # not resolved. Use the given contnode
+        contnodes = [contnode]
+
     to_try = [(inventories.main_inventory, target)]
     if domain:
         full_qualified_name = env.get_domain(domain).get_full_qualified_name(node)
@@ -316,7 +329,7 @@ def missing_reference(app: Sphinx, env: BuildEnvironment, node: Element, contnod
         newnode = nodes.reference('', '', internal=False, refuri=uri, reftitle=reftitle)
         if node.get('refexplicit'):
             # use whatever title was given
-            newnode.append(contnode)
+            newnode.extend(contnodes)
         elif dispname == '-' or \
                 (domain == 'std' and node['reftype'] == 'keyword'):
             # use whatever title was given, but strip prefix
@@ -325,7 +338,7 @@ def missing_reference(app: Sphinx, env: BuildEnvironment, node: Element, contnod
                 newnode.append(contnode.__class__(title[len(in_set) + 1:],
                                                   title[len(in_set) + 1:]))
             else:
-                newnode.append(contnode)
+                newnode.extend(contnodes)
         else:
             # else use the given display name (used for :ref:)
             newnode.append(contnode.__class__(dispname, dispname))

@@ -146,7 +146,14 @@ def env_merge_info(app: Sphinx, env: BuildEnvironment, docnames: Iterable[str],
     if not hasattr(env, '_viewcode_modules'):
         env._viewcode_modules = {}  # type: ignore
     # now merge in the information from the subprocess
-    env._viewcode_modules.update(other._viewcode_modules)  # type: ignore
+    for modname, entry in other._viewcode_modules.items():  # type: ignore
+        if modname not in env._viewcode_modules:  # type: ignore
+            env._viewcode_modules[modname] = entry  # type: ignore
+        else:
+            used = env._viewcode_modules[modname][2]  # type: ignore
+            for fullname, docname in entry[2].items():
+                if fullname not in used:
+                    used[fullname] = docname


 def env_purge_doc(app: Sphinx, env: BuildEnvironment, docname: str) -> None:

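The loop above replaces a plain dict.update() so that 'used' entries recorded by one parallel reader are not discarded when another reader's results are merged. A toy reproduction with invented module and document names (the tuple layout is assumed from the surrounding code, where entry[2] is the 'used' dict):

    env_modules = {'pkg.mod': ('<code>', {}, {'pkg.mod.f': 'api'}, 'pkg.mod')}
    other_modules = {'pkg.mod': ('<code>', {}, {'pkg.mod.g': 'usage'}, 'pkg.mod')}

    for modname, entry in other_modules.items():
        if modname not in env_modules:
            env_modules[modname] = entry
        else:
            used = env_modules[modname][2]
            for fullname, docname in entry[2].items():
                if fullname not in used:
                    used[fullname] = docname

    print(env_modules['pkg.mod'][2])
    # {'pkg.mod.f': 'api', 'pkg.mod.g': 'usage'} -- dict.update() would have
    # kept only the last worker's 'used' mapping.
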
sphinx/io.py (15 changed lines)
@@ -178,20 +178,6 @@ def read_doc(app: "Sphinx", env: BuildEnvironment, filename: str) -> nodes.docum
         # CommonMarkParser.
         parser.settings_spec = RSTParser.settings_spec

-    input_class = app.registry.get_source_input(filetype)
-    if input_class:
-        # Sphinx-1.8 style
-        source = input_class(app, env, source=None, source_path=filename,  # type: ignore
-                             encoding=env.config.source_encoding)
-        pub = Publisher(reader=reader,
-                        parser=parser,
-                        writer=SphinxDummyWriter(),
-                        source_class=SphinxDummySourceClass,  # type: ignore
-                        destination=NullOutput())
-        pub.process_programmatic_settings(None, env.settings, None)
-        pub.set_source(source, filename)
-    else:
-        # Sphinx-2.0 style
     pub = Publisher(reader=reader,
                     parser=parser,
                     writer=SphinxDummyWriter(),
@@ -199,6 +185,5 @@ def read_doc(app: "Sphinx", env: BuildEnvironment, filename: str) -> nodes.docum
                     destination=NullOutput())
     pub.process_programmatic_settings(None, env.settings, None)
     pub.set_source(source_path=filename)
-
     pub.publish()
     return pub.document

@@ -3301,12 +3301,12 @@ msgstr "περισσότεροι από ένας στόχοι βρέθηκαν
 #: sphinx/transforms/post_transforms/__init__.py:171
 #, python-format
 msgid "%s:%s reference target not found: %%(target)s"
-msgstr "Ο %s:%s στόχος αναφοράς δεν βρέθηκε: %% (στόχος)"
+msgstr "Ο %s:%s στόχος αναφοράς δεν βρέθηκε: %%(target)s"

 #: sphinx/transforms/post_transforms/__init__.py:174
 #, python-format
 msgid "%r reference target not found: %%(target)s"
-msgstr "ο στόχος αναφοράς %r δεν βρέθηκε: %%(στόχος)"
+msgstr "ο στόχος αναφοράς %r δεν βρέθηκε: %%(target)s"

 #: sphinx/transforms/post_transforms/images.py:86
 #, python-format

Binary file not shown.

@@ -3306,12 +3306,12 @@ msgstr "más de un objetivo destino encontrado para 'cualquier' referencia cruza
 #: sphinx/transforms/post_transforms/__init__.py:171
 #, python-format
 msgid "%s:%s reference target not found: %%(target)s"
-msgstr "%s:%s destino de referencia no encontrada: %% (destino)s"
+msgstr "%s:%s destino de referencia no encontrada: %%(target)s"

 #: sphinx/transforms/post_transforms/__init__.py:174
 #, python-format
 msgid "%r reference target not found: %%(target)s"
-msgstr "%r destino de referencia no encontrada: %% (destino)s"
+msgstr "%r destino de referencia no encontrada: %%(target)s"

 #: sphinx/transforms/post_transforms/images.py:86
 #, python-format

@@ -3305,7 +3305,7 @@ msgstr "%s:%s reference target nije pronađen: %%(target)s"
 #: sphinx/transforms/post_transforms/__init__.py:174
 #, python-format
 msgid "%r reference target not found: %%(target)s"
-msgstr "%r referenca target nije pronađena: %% (target)"
+msgstr "%r referenca target nije pronađena: %%(target)s"

 #: sphinx/transforms/post_transforms/images.py:86
 #, python-format

@@ -9,6 +9,7 @@
 """

 import traceback
+import warnings
 from importlib import import_module
 from types import MethodType
 from typing import TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Tuple, Type, Union
@@ -23,6 +24,7 @@ from pkg_resources import iter_entry_points

 from sphinx.builders import Builder
 from sphinx.config import Config
+from sphinx.deprecation import RemovedInSphinx60Warning
 from sphinx.domains import Domain, Index, ObjType
 from sphinx.domains.std import GenericObject, Target
 from sphinx.environment import BuildEnvironment
@@ -285,6 +287,9 @@ class SphinxComponentRegistry:
         return parser

     def get_source_input(self, filetype: str) -> "Type[Input]":
+        warnings.warn('SphinxComponentRegistry.get_source_input() is deprecated.',
+                      RemovedInSphinx60Warning)
+
         try:
             return self.source_inputs[filetype]
         except KeyError:

@@ -190,6 +190,6 @@ class BuildDoc(Command):
             if not self.link_index:
                 continue

-            src = app.config.master_doc + app.builder.out_suffix  # type: ignore
+            src = app.config.root_doc + app.builder.out_suffix  # type: ignore
             dst = app.builder.get_outfilename('index')  # type: ignore
             os.symlink(src, dst)

@@ -18,7 +18,7 @@
 </OBJECT>
 <UL>
   <LI>
-    {{ sitemap(short_title, master_doc)|indent(8) }}
+    {{ sitemap(short_title, root_doc)|indent(8) }}
   </LI>
 {%- for indexname, indexcls, content, collapse in domain_indices %}
   <LI>

@@ -4,7 +4,7 @@ Binary Index=No
 Compiled file={{ outname }}.chm
 Contents file={{ outname }}.hhc
 Default Window={{ outname }}
-Default topic={{ master_doc }}
+Default topic={{ root_doc }}
 Display compile progress=No
 Full text search stop list file={{ outname }}.stp
 Full-text search=Yes
@@ -13,7 +13,7 @@ Language={{ "%#x"|format(lcid) }}
 Title={{ title }}

 [WINDOWS]
-{{ outname }}="{{ title }}","{{ outname }}.hhc","{{ outname }}.hhk","{{ master_doc }}","{{ master_doc }}",,,,,0x63520,220,0x10384e,[0,0,1024,768],,,,,,,0
+{{ outname }}="{{ title }}","{{ outname }}.hhc","{{ outname }}.hhk","{{ root_doc }}","{{ root_doc }}",,,,,0x63520,220,0x10384e,[0,0,1024,768],,,,,,,0

 [FILES]
 {%- for filename in files %}

@@ -64,9 +64,9 @@ templates_path = ['{{ dot }}templates']
 source_suffix = {{ suffix | repr }}

 {% endif -%}
-{% if master != 'index' -%}
+{% if root_doc != 'index' -%}
-# The master toctree document.
+# The root document.
-master_doc = {{ master | repr }}
+root_doc = {{ root_doc | repr }}

 {% endif -%}
 {% if language -%}

@@ -343,8 +343,21 @@
   \fi\fi
 }%
 % auxiliary paragraph dissector to get max and min widths
+% but minwidth must not take into account the last line
 \newbox\spx@scratchbox
 \def\spx@verb@getwidths {%
+    \unskip\unpenalty
+    \setbox\spx@scratchbox\lastbox
+    \ifvoid\spx@scratchbox
+    \else
+    \setbox\spx@scratchbox\hbox{\unhbox\spx@scratchbox}%
+    \ifdim\spx@verb@maxwidth<\wd\spx@scratchbox
+        \xdef\spx@verb@maxwidth{\number\wd\spx@scratchbox sp}%
+    \fi
+    \expandafter\spx@verb@getwidths@loop
+    \fi
+}%
+\def\spx@verb@getwidths@loop {%
     \unskip\unpenalty
     \setbox\spx@scratchbox\lastbox
     \ifvoid\spx@scratchbox
@@ -356,7 +369,7 @@
     \ifdim\spx@verb@minwidth>\wd\spx@scratchbox
         \xdef\spx@verb@minwidth{\number\wd\spx@scratchbox sp}%
     \fi
-    \expandafter\spx@verb@getwidths
+    \expandafter\spx@verb@getwidths@loop
     \fi
 }%
 % auxiliary macros to implement "cut long line even in middle of word"

@@ -14,13 +14,13 @@
 <div class="header-wrapper" role="banner">
 <div class="header">
 {%- if logo_url %}
-<p class="logo"><a href="{{ pathto(master_doc)|e }}">
+<p class="logo"><a href="{{ pathto(root_doc)|e }}">
 <img class="logo" src="{{ logo_url|e }}" alt="Logo"/>
 </a></p>
 {%- endif %}
 {%- block headertitle %}
 <div class="headertitle"><a
-href="{{ pathto(master_doc)|e }}">{{ shorttitle|e }}</a></div>
+href="{{ pathto(root_doc)|e }}">{{ shorttitle|e }}</a></div>
 {%- endblock %}
 <div class="rel" role="navigation" aria-label="related navigation">
 {%- for rellink in rellinks|reverse %}
@@ -7,5 +7,5 @@
 :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 #}
-<h3><a href="{{ pathto(master_doc)|e }}">{{ _('Table of Contents') }}</a></h3>
+<h3><a href="{{ pathto(root_doc)|e }}">{{ _('Table of Contents') }}</a></h3>
 {{ toctree(includehidden=theme_globaltoc_includehidden, collapse=theme_globaltoc_collapse, maxdepth=theme_globaltoc_maxdepth) }}
@@ -35,7 +35,7 @@
 {%- if not loop.first %}{{ reldelim2 }}{% endif %}</li>
 {%- endfor %}
 {%- block rootrellink %}
-<li class="nav-item nav-item-0"><a href="{{ pathto(master_doc)|e }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li>
+<li class="nav-item nav-item-0"><a href="{{ pathto(root_doc)|e }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li>
 {%- endblock %}
 {%- for parent in parents %}
 <li class="nav-item nav-item-{{ loop.index }}"><a href="{{ parent.link|e }}" {% if loop.last %}{{ accesskey("U") }}{% endif %}>{{ parent.title }}</a>{{ reldelim1 }}</li>
@@ -52,7 +52,7 @@
 <div class="sphinxsidebarwrapper">
 {%- block sidebarlogo %}
 {%- if logo_url %}
-<p class="logo"><a href="{{ pathto(master_doc)|e }}">
+<p class="logo"><a href="{{ pathto(root_doc)|e }}">
 <img class="logo" src="{{ logo_url|e }}" alt="Logo"/>
 </a></p>
 {%- endif %}
@@ -8,6 +8,6 @@
 :license: BSD, see LICENSE for details.
 #}
 {%- if display_toc %}
-<h3><a href="{{ pathto(master_doc)|e }}">{{ _('Table of Contents') }}</a></h3>
+<h3><a href="{{ pathto(root_doc)|e }}">{{ _('Table of Contents') }}</a></h3>
 {{ toc }}
 {%- endif %}
@@ -21,7 +21,7 @@
 «  <a href="{{ prev.link|e }}">{{ prev.title }}</a>
   ::  
 {%- endif %}
-<a class="uplink" href="{{ pathto(master_doc)|e }}">{{ _('Contents') }}</a>
+<a class="uplink" href="{{ pathto(root_doc)|e }}">{{ _('Contents') }}</a>
 {%- if next %}
   ::  
 <a href="{{ next.link|e }}">{{ next.title }}</a>  »
@@ -12,7 +12,7 @@
 {%- if logo %}
 <div class="header" role="banner">
 <div class="logo">
-<a href="{{ pathto(master_doc)|e }}">
+<a href="{{ pathto(root_doc)|e }}">
 <img class="logo" src="{{ pathto(logo, 1)|e }}" alt="Logo"/>
 </a>
 </div>
@@ -178,8 +178,6 @@ class HTMLThemeFactory:
 """Try to load a theme having specifed name."""
 if name == 'alabaster':
 self.load_alabaster_theme()
-elif name == 'sphinx_rtd_theme':
-self.load_sphinx_rtd_theme()
 else:
 self.load_external_theme(name)

@@ -237,13 +235,13 @@ class HTMLThemeFactory:
 if name not in self.themes:
 self.load_extra_theme(name)

+if name not in self.themes and name == 'sphinx_rtd_theme':
+# sphinx_rtd_theme (< 0.2.5) # RemovedInSphinx60Warning
+logger.warning(__('sphinx_rtd_theme (< 0.3.0) found. '
+'It will not be available since Sphinx-6.0'))
+self.load_sphinx_rtd_theme()
+
 if name not in self.themes:
-if name == 'sphinx_rtd_theme':
-raise ThemeError(__('sphinx_rtd_theme is no longer a hard dependency '
-'since version 1.4.0. Please install it manually.'
-'(pip install sphinx_rtd_theme)'))
-else:
-raise ThemeError(__('no theme named %r found '
-'(missing theme.conf?)') % name)
+raise ThemeError(__('no theme named %r found (missing theme.conf?)') % name)

 return Theme(name, self.themes[name], factory=self)
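
Note: the two theming hunks above replace the hard "install it manually" error for
sphinx_rtd_theme with a softer path: a release older than 0.3.0 is still loaded but logs
a warning that it will not be available from Sphinx 6.0, and anything else falls through
to the generic "no theme named %r found" error. For a project using a current release of
the theme nothing changes; a minimal conf.py sketch (the theme choice here is only an
illustration, not part of this commit)::

    # conf.py, illustrative only; install the theme separately,
    # e.g. pip install sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
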
@@ -22,10 +22,14 @@ from sphinx.locale import __
 from sphinx.transforms import SphinxTransform
 from sphinx.util import logging
 from sphinx.util.docutils import SphinxTranslator
-from sphinx.util.nodes import process_only_nodes
+from sphinx.util.nodes import find_pending_xref_condition, process_only_nodes

 logger = logging.getLogger(__name__)

+if False:
+# For type annotation
+from docutils.nodes import Node
+

 class SphinxPostTransform(SphinxTransform):
 """A base class of post-transforms.
@@ -97,8 +101,21 @@ class ReferencesResolver(SphinxPostTransform):
 if newnode is None:
 self.warn_missing_reference(refdoc, typ, target, node, domain)
 except NoUri:
-newnode = contnode
-node.replace_self(newnode or contnode)
+newnode = None
+
+if newnode:
+newnodes = [newnode] # type: List[Node]
+else:
+newnodes = [contnode]
+if newnode is None and isinstance(node[0], addnodes.pending_xref_condition):
+matched = find_pending_xref_condition(node, "*")
+if matched:
+newnodes = matched.children
+else:
+logger.warning(__('Could not determine the fallback text for the '
+'cross-reference. Might be a bug.'), location=node)
+
+node.replace_self(newnodes)

 def resolve_anyref(self, refdoc: str, node: pending_xref, contnode: Element) -> Element:
 """Resolve reference generated by the "any" role."""
@@ -168,14 +185,13 @@ class ReferencesResolver(SphinxPostTransform):
 if self.app.emit_firstresult('warn-missing-reference', domain, node):
 return
 elif domain and typ in domain.dangling_warnings:
-msg = domain.dangling_warnings[typ]
+msg = domain.dangling_warnings[typ] % {'target': target}
 elif node.get('refdomain', 'std') not in ('', 'std'):
-msg = (__('%s:%s reference target not found: %%(target)s') %
-(node['refdomain'], typ))
+msg = (__('%s:%s reference target not found: %s') %
+(node['refdomain'], typ, target))
 else:
-msg = __('%r reference target not found: %%(target)s') % typ
-logger.warning(msg % {'target': target},
-location=node, type='ref', subtype=typ)
+msg = __('%r reference target not found: %s') % (typ, target)
+logger.warning(msg, location=node, type='ref', subtype=typ)


 class OnlyNodeTransform(SphinxPostTransform):
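
Note: with the ReferencesResolver changes above, an unresolvable cross-reference no longer
falls back to the bare content node unconditionally; when the pending_xref carries
pending_xref_condition children, the "*" (fallback) condition supplies the displayed nodes.
The helper below is only a condensed restatement of that rule for illustration, not code
from this commit::

    from sphinx import addnodes
    from sphinx.util.nodes import find_pending_xref_condition

    def fallback_content(node: addnodes.pending_xref, contnode):
        # Prefer the content registered for the "*" condition; otherwise keep
        # the original content node, exactly as the resolver now does.
        matched = find_pending_xref_condition(node, "*")
        return list(matched.children) if matched else [contnode]
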
@@ -47,8 +47,8 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)

 # Generally useful regular expressions.
-ws_re = re.compile(r'\s+') # type: Pattern
-url_re = re.compile(r'(?P<schema>.+)://.*') # type: Pattern
+ws_re: Pattern = re.compile(r'\s+')
+url_re: Pattern = re.compile(r'(?P<schema>.+)://.*')


 # High-level utility functions.
@@ -107,7 +107,7 @@ class FilenameUniqDict(dict):
 appear in. Used for images and downloadable files in the environment.
 """
 def __init__(self) -> None:
-self._existing = set() # type: Set[str]
+self._existing: Set[str] = set()

 def add_file(self, docname: str, newfile: str) -> str:
 if newfile in self:
@@ -379,7 +379,7 @@ def format_exception_cut_frames(x: int = 1) -> str:
 """Format an exception with traceback, but only the last x frames."""
 typ, val, tb = sys.exc_info()
 # res = ['Traceback (most recent call last):\n']
-res = [] # type: List[str]
+res: List[str] = []
 tbres = traceback.format_tb(tb)
 res += tbres[-x:]
 res += traceback.format_exception_only(typ, val)
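
Note: the sphinx.util hunks above, and most of the smaller hunks that follow, are the same
mechanical modernization: type-comment annotations are rewritten as PEP 526 variable
annotations. A generic before/after, not tied to any particular file::

    from typing import Dict, List

    # before: type comments
    cache = {}   # type: Dict[str, int]
    names = []   # type: List[str]

    # after: PEP 526 variable annotations
    cache: Dict[str, int] = {}
    names: List[str] = []
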
@@ -98,7 +98,7 @@ class ASTBaseBase:
 return False
 return True

-__hash__ = None # type: Callable[[], int]
+__hash__: Callable[[], int] = None

 def clone(self) -> Any:
 return deepcopy(self)
@@ -223,9 +223,9 @@ class BaseParser:

 self.pos = 0
 self.end = len(self.definition)
-self.last_match = None # type: Match
-self._previous_state = (0, None) # type: Tuple[int, Match]
-self.otherErrors = [] # type: List[DefinitionError]
+self.last_match: Match = None
+self._previous_state: Tuple[int, Match] = (0, None)
+self.otherErrors: List[DefinitionError] = []

 # in our tests the following is set to False to capture bad parsing
 self.allowFallbackExpressionParsing = True
@@ -356,7 +356,7 @@ class BaseParser:
 # TODO: add handling of string literals and similar
 brackets = {'(': ')', '[': ']', '{': '}'}
 startPos = self.pos
-symbols = [] # type: List[str]
+symbols: List[str] = []
 while not self.eof:
 if len(symbols) == 0 and self.current_char in end:
 break
@@ -11,7 +11,7 @@
 import os
 import re
 import sys
-from typing import Dict
+from typing import Dict, Pattern

 try:
 # check if colorama is installed to support color on Windows
@@ -20,8 +20,8 @@ except ImportError:
 colorama = None


-_ansi_re = re.compile('\x1b\\[(\\d\\d;){0,2}\\d\\dm')
-codes = {} # type: Dict[str, str]
+_ansi_re: Pattern = re.compile('\x1b\\[(\\d\\d;){0,2}\\d\\dm')
+codes: Dict[str, str] = {}


 def terminal_safe(s: str) -> str:
@@ -44,7 +44,7 @@ def get_terminal_width() -> int:
 return terminal_width


-_tw = get_terminal_width()
+_tw: int = get_terminal_width()


 def term_width_line(text: str) -> str:
@@ -27,7 +27,7 @@ def _is_single_paragraph(node: nodes.field_body) -> bool:
 if len(node) == 0:
 return False
 elif len(node) > 1:
-for subnode in node[1:]: # type: nodes.Node
+for subnode in node[1:]: # type: Node
 if not isinstance(subnode, nodes.system_message):
 return False
 if isinstance(node[0], nodes.paragraph):
@@ -195,7 +195,7 @@ class TypedField(GroupedField):
 fieldname = nodes.field_name('', self.label)
 if len(items) == 1 and self.can_collapse:
 fieldarg, content = items[0]
-bodynode = handle_item(fieldarg, content) # type: nodes.Node
+bodynode: Node = handle_item(fieldarg, content)
 else:
 bodynode = self.list_type()
 for fieldarg, content in items:
@@ -209,7 +209,7 @@ class DocFieldTransformer:
 Transforms field lists in "doc field" syntax into better-looking
 equivalents, using the field type definitions given on a domain.
 """
-typemap = None # type: Dict[str, Tuple[Field, bool]]
+typemap: Dict[str, Tuple[Field, bool]] = None

 def __init__(self, directive: "ObjectDescription") -> None:
 self.directive = directive
@@ -227,9 +227,9 @@ class DocFieldTransformer:
 """Transform a single field list *node*."""
 typemap = self.typemap

-entries = [] # type: List[Union[nodes.field, Tuple[Field, Any]]]
-groupindices = {} # type: Dict[str, int]
-types = {} # type: Dict[str, Dict]
+entries: List[Union[nodes.field, Tuple[Field, Any]]] = []
+groupindices: Dict[str, int] = {}
+types: Dict[str, Dict] = {}

 # step 1: traverse all fields and collect field types and content
 for field in cast(List[nodes.field], node):
@@ -23,7 +23,7 @@ field_list_item_re = re.compile(Body.patterns['field_marker'])
 def extract_metadata(s: str) -> Dict[str, str]:
 """Extract metadata from docstring."""
 in_other_element = False
-metadata = {} # type: Dict[str, str]
+metadata: Dict[str, str] = {}

 if not s:
 return metadata
@@ -42,7 +42,7 @@ if TYPE_CHECKING:


 __version_info__ = tuple(LooseVersion(docutils.__version__).version)
-additional_nodes = set() # type: Set[Type[nodes.Element]]
+additional_nodes: Set[Type[Element]] = set()


 @contextmanager
@@ -176,8 +176,8 @@ class sphinx_domains:
 """
 def __init__(self, env: "BuildEnvironment") -> None:
 self.env = env
-self.directive_func = None # type: Callable
-self.roles_func = None # type: Callable
+self.directive_func: Callable = None
+self.roles_func: Callable = None

 def __enter__(self) -> None:
 self.enable()
@@ -491,7 +491,7 @@ class SphinxTranslator(nodes.NodeVisitor):

 # cache a vanilla instance of nodes.document
 # Used in new_document() function
-__document_cache__ = None # type: nodes.document
+__document_cache__: nodes.document = None


 def new_document(source_path: str, settings: Any = None) -> nodes.document:
@@ -166,6 +166,15 @@ def getannotations(obj: Any) -> Mapping[str, Any]:
 return {}


+def getglobals(obj: Any) -> Mapping[str, Any]:
+"""Get __globals__ from given *obj* safely."""
+__globals__ = safe_getattr(obj, '__globals__', None)
+if isinstance(__globals__, Mapping):
+return __globals__
+else:
+return {}
+
+
 def getmro(obj: Any) -> Tuple["Type", ...]:
 """Get __mro__ from given *obj* safely."""
 __mro__ = safe_getattr(obj, '__mro__', None)
@@ -484,9 +493,9 @@ class DefaultValue:

 def _should_unwrap(subject: Callable) -> bool:
 """Check the function should be unwrapped on getting signature."""
-if (safe_getattr(subject, '__globals__', None) and
-subject.__globals__.get('__name__') == 'contextlib' and # type: ignore
-subject.__globals__.get('__file__') == contextlib.__file__): # type: ignore
+__globals__ = getglobals(subject)
+if (__globals__.get('__name__') == 'contextlib' and
+__globals__.get('__file__') == contextlib.__file__):
 # contextmanger should be unwrapped
 return True

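
Note: the getglobals() helper added above wraps the __globals__ lookup so that callers such
as _should_unwrap() can drop the "# type: ignore" comments and the truthiness guard. A small
usage sketch, assuming the helper is imported from sphinx.util.inspect where this hunk adds
it (the example function itself is hypothetical)::

    import contextlib

    from sphinx.util.inspect import getglobals

    def defined_in_contextlib(func) -> bool:
        # getglobals() returns {} when *func* has no usable __globals__ mapping,
        # so both .get() calls below are always safe.
        g = getglobals(func)
        return (g.get('__name__') == 'contextlib' and
                g.get('__file__') == contextlib.__file__)
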
@@ -93,7 +93,7 @@ class InventoryFile:

 @classmethod
 def load_v1(cls, stream: InventoryFileReader, uri: str, join: Callable) -> Inventory:
-invdata = {} # type: Inventory
+invdata: Inventory = {}
 projname = stream.readline().rstrip()[11:]
 version = stream.readline().rstrip()[11:]
 for line in stream.readlines():
@@ -111,7 +111,7 @@ class InventoryFile:

 @classmethod
 def load_v2(cls, stream: InventoryFileReader, uri: str, join: Callable) -> Inventory:
-invdata = {} # type: Inventory
+invdata: Inventory = {}
 projname = stream.readline().rstrip()[11:]
 version = stream.readline().rstrip()[11:]
 line = stream.readline()
@@ -109,8 +109,8 @@ def loads(x: str) -> Any:
 nothing = object()
 i = 0
 n = len(x)
-stack = [] # type: List[Union[List, Dict]]
-obj = nothing # type: Any
+stack: List[Union[List, Dict]] = []
+obj: Any = nothing
 key = False
 keys = []
 while i < n:
@@ -160,7 +160,7 @@ def loads(x: str) -> Any:
 raise ValueError("multiple values")
 key = False
 else:
-y = None # type: Any
+y: Any = None
 m = _str_re.match(x, i)
 if m:
 y = decode_string(m.group()[1:-1])
@@ -28,7 +28,7 @@ if TYPE_CHECKING:
 NAMESPACE = 'sphinx'
 VERBOSE = 15

-LEVEL_NAMES = defaultdict(lambda: logging.WARNING) # type: Dict[str, int]
+LEVEL_NAMES: Dict[str, int] = defaultdict(lambda: logging.WARNING)
 LEVEL_NAMES.update({
 'CRITICAL': logging.CRITICAL,
 'SEVERE': logging.CRITICAL,
@@ -39,7 +39,7 @@ LEVEL_NAMES.update({
 'DEBUG': logging.DEBUG,
 })

-VERBOSITY_MAP = defaultdict(lambda: 0) # type: Dict[int, int]
+VERBOSITY_MAP: Dict[int, int] = defaultdict(lambda: 0)
 VERBOSITY_MAP.update({
 0: logging.INFO,
 1: VERBOSE,
@@ -91,7 +91,7 @@ def convert_serializable(records: List[logging.LogRecord]) -> None:
 class SphinxLogRecord(logging.LogRecord):
 """Log record class supporting location"""
 prefix = ''
-location = None # type: Any
+location: Any = None

 def getMessage(self) -> str:
 message = super().getMessage()
@@ -163,6 +163,8 @@ class NewLineStreamHandler(logging.StreamHandler):
 class MemoryHandler(logging.handlers.BufferingHandler):
 """Handler buffering all logs."""

+buffer: List[logging.LogRecord]
+
 def __init__(self) -> None:
 super().__init__(-1)

@@ -174,7 +176,7 @@ class MemoryHandler(logging.handlers.BufferingHandler):
 try:
 for record in self.buffer:
 logger.handle(record)
-self.buffer = [] # type: List[logging.LogRecord]
+self.buffer = []
 finally:
 self.release()

@@ -328,7 +330,7 @@ def prefixed_warnings(prefix: str) -> Generator[None, None, None]:

 class LogCollector:
 def __init__(self) -> None:
-self.logs = [] # type: List[logging.LogRecord]
+self.logs: List[logging.LogRecord] = []

 @contextmanager
 def collect(self) -> Generator[None, None, None]:
@@ -449,7 +451,7 @@ class OnceFilter(logging.Filter):

 def __init__(self, name: str = '') -> None:
 super().__init__(name)
-self.messages = {} # type: Dict[str, List]
+self.messages: Dict[str, List] = {}

 def filter(self, record: logging.LogRecord) -> bool:
 once = getattr(record, 'once', '')
@@ -470,7 +472,7 @@ class SphinxLogRecordTranslator(logging.Filter):
 * Make a instance of SphinxLogRecord
 * docname to path if location given
 """
-LogRecordClass = None # type: Type[logging.LogRecord]
+LogRecordClass: Type[logging.LogRecord] = None

 def __init__(self, app: "Sphinx") -> None:
 self.app = app
@@ -21,7 +21,7 @@ def _translate_pattern(pat: str) -> str:
 match slashes.
 """
 i, n = 0, len(pat)
-res = '' # type: str
+res = ''
 while i < n:
 c = pat[i]
 i += 1
@@ -86,7 +86,7 @@ class Matcher:
 DOTFILES = Matcher(['**/.*'])


-_pat_cache = {} # type: Dict[str, Pattern]
+_pat_cache: Dict[str, Pattern] = {}


 def patmatch(name: str, pat: str) -> Optional[Match[str]]:
@@ -10,7 +10,7 @@

 import re
 import unicodedata
-from typing import TYPE_CHECKING, Any, Callable, Iterable, List, Set, Tuple, Type, cast
+from typing import TYPE_CHECKING, Any, Callable, Iterable, List, Set, Tuple, Type, Union, cast

 from docutils import nodes
 from docutils.nodes import Element, Node
@@ -251,7 +251,7 @@ META_TYPE_NODES = (

 def extract_messages(doctree: Element) -> Iterable[Tuple[Element, str]]:
 """Extract translatable messages from a document tree."""
-for node in doctree.traverse(is_translatable): # type: nodes.Element
+for node in doctree.traverse(is_translatable): # type: Element
 if isinstance(node, addnodes.translatable):
 for msg in node.extract_original_messages():
 yield node, msg
@@ -363,7 +363,7 @@ indextypes = [
 def process_index_entry(entry: str, targetid: str) -> List[Tuple[str, str, str, str, str]]:
 from sphinx.domains.python import pairindextypes

-indexentries = [] # type: List[Tuple[str, str, str, str, str]]
+indexentries: List[Tuple[str, str, str, str, str]] = []
 entry = entry.strip()
 oentry = entry
 main = ''
@@ -531,8 +531,18 @@ def make_id(env: "BuildEnvironment", document: nodes.document,
 return node_id


+def find_pending_xref_condition(node: addnodes.pending_xref, condition: str) -> Element:
+"""Pick matched pending_xref_condition node up from the pending_xref."""
+for subnode in node:
+if (isinstance(subnode, addnodes.pending_xref_condition) and
+subnode.get('condition') == condition):
+return subnode
+else:
+return None
+
+
 def make_refnode(builder: "Builder", fromdocname: str, todocname: str, targetid: str,
-child: Node, title: str = None) -> nodes.reference:
+child: Union[Node, List[Node]], title: str = None) -> nodes.reference:
 """Shortcut to create a reference node."""
 node = nodes.reference('', '', internal=True)
 if fromdocname == todocname and targetid:
@@ -545,7 +555,7 @@ def make_refnode(builder: "Builder", fromdocname: str, todocname: str, targetid:
 node['refuri'] = builder.get_relative_uri(fromdocname, todocname)
 if title:
 node['reftitle'] = title
-node.append(child)
+node += child
 return node


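
Note: together these sphinx.util.nodes hunks let make_refnode() accept either a single node
or a list of nodes for *child* (the switch to "node += child" is the docutils idiom that
handles both), which is what allows a resolver to pass the children of a matched
pending_xref_condition straight through. A sketch of the new call shape, with placeholder
names for everything around the call::

    from sphinx.util.nodes import make_refnode

    def build_reference(builder, fromdoc, todoc, targetid, content, title=None):
        # *content* may now be a list of nodes (e.g. matched fallback children)
        # as well as a single content node.
        return make_refnode(builder, fromdoc, todoc, targetid, content, title)
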
@@ -185,7 +185,7 @@ class FileAvoidWrite:
 """
 def __init__(self, path: str) -> None:
 self._path = path
-self._io = None # type: Optional[StringIO]
+self._io: Optional[StringIO] = None

 def write(self, data: str) -> None:
 if not self._io:
@@ -60,15 +60,15 @@ class ParallelTasks:
 def __init__(self, nproc: int) -> None:
 self.nproc = nproc
 # (optional) function performed by each task on the result of main task
-self._result_funcs = {} # type: Dict[int, Callable]
+self._result_funcs: Dict[int, Callable] = {}
 # task arguments
-self._args = {} # type: Dict[int, List[Any]]
+self._args: Dict[int, List[Any]] = {}
 # list of subprocesses (both started and waiting)
-self._procs = {} # type: Dict[int, multiprocessing.Process]
+self._procs: Dict[int, multiprocessing.Process] = {}
 # list of receiving pipe connections of running subprocesses
-self._precvs = {} # type: Dict[int, Any]
+self._precvs: Dict[int, Any] = {}
 # list of receiving pipe connections of waiting subprocesses
-self._precvsWaiting = {} # type: Dict[int, Any]
+self._precvsWaiting: Dict[int, Any] = {}
 # number of working subprocesses
 self._pworking = 0
 # task number of each subprocess
@@ -103,8 +103,21 @@ class ParallelTasks:
 self._join_one()

 def join(self) -> None:
+try:
 while self._pworking:
 self._join_one()
+except Exception:
+# shutdown other child processes on failure
+self.terminate()
+raise
+
+def terminate(self) -> None:
+for tid in list(self._precvs):
+self._procs[tid].terminate()
+self._result_funcs.pop(tid)
+self._procs.pop(tid)
+self._precvs.pop(tid)
+self._pworking -= 1

 def _join_one(self) -> None:
 for tid, pipe in self._precvs.items():
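
Note: join() now wraps its wait loop so that an exception surfacing from one worker
(typically re-raised inside _join_one()) terminates the remaining child processes instead
of leaving them running. A hedged usage sketch of this internal API, assuming the
long-standing add_task(task_func, arg, result_func) signature::

    from sphinx.util.parallel import ParallelTasks

    def square(n):
        return n * n

    def collect(arg, result):
        # Result callbacks receive (arg, result); if one of them raises,
        # join() now calls terminate() before re-raising.
        print(arg, '->', result)

    tasks = ParallelTasks(nproc=4)
    for n in range(10):
        tasks.add_task(square, n, collect)
    tasks.join()
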
@@ -30,8 +30,7 @@ symbols_re = re.compile(r'([!-\-/:-@\[-`{-~])') # symbols without dot(0x2e)
 SECTIONING_CHARS = ['=', '-', '~']

 # width of characters
-WIDECHARS = defaultdict(lambda: "WF") # type: Dict[str, str]
-# WF: Wide + Full-width
+WIDECHARS: Dict[str, str] = defaultdict(lambda: "WF") # WF: Wide + Full-width
 WIDECHARS["ja"] = "WFA" # In Japanese, Ambiguous characters also have double width


@@ -98,12 +98,12 @@ unicode_tex_replacements = [
 # %, {, }, \, #, and ~ are the only ones which must be replaced by _ character
 # It would be simpler to define it entirely here rather than in init().
 # Unicode replacements are superfluous, as idescape() uses backslashreplace
-tex_replace_map = {} # type: Dict[int, str]
+tex_replace_map: Dict[int, str] = {}

-_tex_escape_map = {} # type: Dict[int, str]
-_tex_escape_map_without_unicode = {} # type: Dict[int, str]
-_tex_hlescape_map = {} # type: Dict[int, str]
-_tex_hlescape_map_without_unicode = {} # type: Dict[int, str]
+_tex_escape_map: Dict[int, str] = {}
+_tex_escape_map_without_unicode: Dict[int, str] = {}
+_tex_hlescape_map: Dict[int, str] = {}
+_tex_hlescape_map_without_unicode: Dict[int, str] = {}


 def escape(s: str, latex_engine: str = None) -> str:
@@ -263,7 +263,10 @@ def stringify(annotation: Any) -> str:
 else:
 return annotation
 elif isinstance(annotation, TypeVar):
+if annotation.__module__ == 'typing':
 return annotation.__name__
+else:
+return '.'.join([annotation.__module__, annotation.__name__])
 elif inspect.isNewType(annotation):
 # Could not get the module where it defiend
 return annotation.__name__
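
Note: the typing hunk above makes stringify() qualify a TypeVar defined outside the typing
module with its defining module, while typing's own TypeVars keep their bare name. A quick
illustration::

    from typing import AnyStr, TypeVar

    from sphinx.util.typing import stringify

    T = TypeVar('T')  # T.__module__ is the defining module's name

    print(stringify(AnyStr))  # 'AnyStr': typing.* stays unqualified
    print(stringify(T))       # '__main__.T' when run as a script
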
@@ -164,15 +164,15 @@ class Table:
 return self.colspec
 elif self.colwidths and 'colwidths-given' in self.classes:
 total = sum(self.colwidths)
-colspecs = ['\\X{%d}{%d}' % (width, total) for width in self.colwidths]
+colspecs = [r'\X{%d}{%d}' % (width, total) for width in self.colwidths]
 return '{|%s|}' % '|'.join(colspecs) + CR
 elif self.has_problematic:
-return '{|*{%d}{\\X{1}{%d}|}}' % (self.colcount, self.colcount) + CR
+return r'{|*{%d}{\X{1}{%d}|}}' % (self.colcount, self.colcount) + CR
 elif self.get_table_type() == 'tabulary':
 # sphinx.sty sets T to be J by default.
 return '{|' + ('T|' * self.colcount) + '}' + CR
 elif self.has_oldproblematic:
-return '{|*{%d}{\\X{1}{%d}|}}' % (self.colcount, self.colcount) + CR
+return r'{|*{%d}{\X{1}{%d}|}}' % (self.colcount, self.colcount) + CR
 else:
 return '{|' + ('l|' * self.colcount) + '}' + CR

@@ -253,19 +253,19 @@ def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
 if scale == 100:
 float(amount) # validate amount is float
 if unit in ('', "px"):
-res = "%s\\sphinxpxdimen" % amount
+res = r"%s\sphinxpxdimen" % amount
 elif unit == 'pt':
 res = '%sbp' % amount # convert to 'bp'
 elif unit == "%":
-res = "%.3f\\linewidth" % (float(amount) / 100.0)
+res = r"%.3f\linewidth" % (float(amount) / 100.0)
 else:
 amount_float = float(amount) * scale / 100.0
 if unit in ('', "px"):
-res = "%.5f\\sphinxpxdimen" % amount_float
+res = r"%.5f\sphinxpxdimen" % amount_float
 elif unit == 'pt':
 res = '%.5fbp' % amount_float
 elif unit == "%":
-res = "%.5f\\linewidth" % (amount_float / 100.0)
+res = r"%.5f\linewidth" % (amount_float / 100.0)
 else:
 res = "%.5f%s" % (amount_float, unit)
 return res
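
Note: from here on the LaTeX writer hunks are almost entirely a literal-for-literal switch
from doubled backslashes to raw strings; the emitted LaTeX is unchanged. For example::

    # the two spellings denote the same string
    assert '\\sphinxpxdimen' == r'\sphinxpxdimen'
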
@@ -373,9 +373,9 @@ class LaTeXTranslator(SphinxTranslator):
 if (self.config.language not in {None, 'en', 'ja'} and
 'fncychap' not in self.config.latex_elements):
 # use Sonny style if any language specified (except English)
-self.elements['fncychap'] = ('\\usepackage[Sonny]{fncychap}' + CR +
-'\\ChNameVar{\\Large\\normalfont\\sffamily}' + CR +
-'\\ChTitleVar{\\Large\\normalfont\\sffamily}')
+self.elements['fncychap'] = (r'\usepackage[Sonny]{fncychap}' + CR +
+r'\ChNameVar{\Large\normalfont\sffamily}' + CR +
+r'\ChTitleVar{\Large\normalfont\sffamily}')

 self.babel = self.builder.babel
 if self.config.language and not self.babel.is_supported_language():
@@ -400,19 +400,19 @@ class LaTeXTranslator(SphinxTranslator):
 logger.warning(__('too large :maxdepth:, ignored.'))
 tocdepth = len(LATEXSECTIONNAMES) - 2

-self.elements['tocdepth'] = '\\setcounter{tocdepth}{%d}' % tocdepth
+self.elements['tocdepth'] = r'\setcounter{tocdepth}{%d}' % tocdepth
 minsecnumdepth = max(minsecnumdepth, tocdepth)

 if self.config.numfig and (self.config.numfig_secnum_depth > 0):
 minsecnumdepth = max(minsecnumdepth, self.numfig_secnum_depth - 1)

 if minsecnumdepth > self.secnumdepth:
-self.elements['secnumdepth'] = '\\setcounter{secnumdepth}{%d}' %\
+self.elements['secnumdepth'] = r'\setcounter{secnumdepth}{%d}' %\
 minsecnumdepth

 contentsname = document.get('contentsname')
 if contentsname:
-self.elements['contentsname'] = self.babel_renewcommand('\\contentsname',
+self.elements['contentsname'] = self.babel_renewcommand(r'\contentsname',
 contentsname)

 if self.elements['maxlistdepth']:
@@ -420,8 +420,7 @@ class LaTeXTranslator(SphinxTranslator):
 if sphinxpkgoptions:
 self.elements['sphinxpkgoptions'] = '[,%s]' % ','.join(sphinxpkgoptions)
 if self.elements['sphinxsetup']:
-self.elements['sphinxsetup'] = ('\\sphinxsetup{%s}' %
-self.elements['sphinxsetup'])
+self.elements['sphinxsetup'] = (r'\sphinxsetup{%s}' % self.elements['sphinxsetup'])
 if self.elements['extraclassoptions']:
 self.elements['classoptions'] += ',' + \
 self.elements['extraclassoptions']
@@ -466,8 +465,7 @@ class LaTeXTranslator(SphinxTranslator):
 def hypertarget(self, id: str, withdoc: bool = True, anchor: bool = True) -> str:
 if withdoc:
 id = self.curfilestack[-1] + ':' + id
-return ('\\phantomsection' if anchor else '') + \
-'\\label{%s}' % self.idescape(id)
+return (r'\phantomsection' if anchor else '') + r'\label{%s}' % self.idescape(id)

 def hypertarget_to(self, node: Element, anchor: bool = False) -> str:
 labels = ''.join(self.hypertarget(node_id, anchor=False) for node_id in node['ids'])
@@ -477,48 +475,48 @@ class LaTeXTranslator(SphinxTranslator):
 return labels

 def hyperlink(self, id: str) -> str:
-return '{\\hyperref[%s]{' % self.idescape(id)
+return r'{\hyperref[%s]{' % self.idescape(id)

 def hyperpageref(self, id: str) -> str:
-return '\\autopageref*{%s}' % self.idescape(id)
+return r'\autopageref*{%s}' % self.idescape(id)

 def escape(self, s: str) -> str:
 return texescape.escape(s, self.config.latex_engine)

 def idescape(self, id: str) -> str:
-return '\\detokenize{%s}' % str(id).translate(tex_replace_map).\
+return r'\detokenize{%s}' % str(id).translate(tex_replace_map).\
 encode('ascii', 'backslashreplace').decode('ascii').\
 replace('\\', '_')

 def babel_renewcommand(self, command: str, definition: str) -> str:
 if self.elements['multilingual']:
-prefix = '\\addto\\captions%s{' % self.babel.get_language()
+prefix = r'\addto\captions%s{' % self.babel.get_language()
 suffix = '}'
 else: # babel is disabled (mainly for Japanese environment)
 prefix = ''
 suffix = ''

-return '%s\\renewcommand{%s}{%s}%s' % (prefix, command, definition, suffix) + CR
+return r'%s\renewcommand{%s}{%s}%s' % (prefix, command, definition, suffix) + CR

 def generate_indices(self) -> str:
 def generate(content: List[Tuple[str, List[IndexEntry]]], collapsed: bool) -> None:
-ret.append('\\begin{sphinxtheindex}' + CR)
-ret.append('\\let\\bigletter\\sphinxstyleindexlettergroup' + CR)
+ret.append(r'\begin{sphinxtheindex}' + CR)
+ret.append(r'\let\bigletter\sphinxstyleindexlettergroup' + CR)
 for i, (letter, entries) in enumerate(content):
 if i > 0:
-ret.append('\\indexspace' + CR)
-ret.append('\\bigletter{%s}' % self.escape(letter) + CR)
+ret.append(r'\indexspace' + CR)
+ret.append(r'\bigletter{%s}' % self.escape(letter) + CR)
 for entry in entries:
 if not entry[3]:
 continue
-ret.append('\\item\\relax\\sphinxstyleindexentry{%s}' %
+ret.append(r'\item\relax\sphinxstyleindexentry{%s}' %
 self.encode(entry[0]))
 if entry[4]:
 # add "extra" info
-ret.append('\\sphinxstyleindexextra{%s}' % self.encode(entry[4]))
-ret.append('\\sphinxstyleindexpageref{%s:%s}' %
+ret.append(r'\sphinxstyleindexextra{%s}' % self.encode(entry[4]))
+ret.append(r'\sphinxstyleindexpageref{%s:%s}' %
 (entry[2], self.idescape(entry[3])) + CR)
-ret.append('\\end{sphinxtheindex}' + CR)
+ret.append(r'\end{sphinxtheindex}' + CR)

 ret = []
 # latex_domain_indices can be False/True or a list of index names
@@ -534,7 +532,7 @@ class LaTeXTranslator(SphinxTranslator):
 self.builder.docnames)
 if not content:
 continue
-ret.append('\\renewcommand{\\indexname}{%s}' % indexcls.localname + CR)
+ret.append(r'\renewcommand{\indexname}{%s}' % indexcls.localname + CR)
 generate(content, collapsed)

 return ''.join(ret)
@@ -564,7 +562,7 @@ class LaTeXTranslator(SphinxTranslator):
 self.first_document = 0
 elif self.first_document == 0:
 # ... and all others are the appendices
-self.body.append(CR + '\\appendix' + CR)
+self.body.append(CR + r'\appendix' + CR)
 self.first_document = -1
 if 'docname' in node:
 self.body.append(self.hypertarget(':doc'))
@@ -597,11 +595,11 @@ class LaTeXTranslator(SphinxTranslator):

 def visit_topic(self, node: Element) -> None:
 self.in_minipage = 1
-self.body.append(CR + '\\begin{sphinxShadowBox}' + CR)
+self.body.append(CR + r'\begin{sphinxShadowBox}' + CR)

 def depart_topic(self, node: Element) -> None:
 self.in_minipage = 0
-self.body.append('\\end{sphinxShadowBox}' + CR)
+self.body.append(r'\end{sphinxShadowBox}' + CR)
 visit_sidebar = visit_topic
 depart_sidebar = depart_topic

@@ -613,20 +611,20 @@ class LaTeXTranslator(SphinxTranslator):

 def visit_productionlist(self, node: Element) -> None:
 self.body.append(BLANKLINE)
-self.body.append('\\begin{productionlist}' + CR)
+self.body.append(r'\begin{productionlist}' + CR)
 self.in_production_list = 1

 def depart_productionlist(self, node: Element) -> None:
-self.body.append('\\end{productionlist}' + BLANKLINE)
+self.body.append(r'\end{productionlist}' + BLANKLINE)
 self.in_production_list = 0

 def visit_production(self, node: Element) -> None:
 if node['tokenname']:
 tn = node['tokenname']
 self.body.append(self.hypertarget('grammar-token-' + tn))
-self.body.append('\\production{%s}{' % self.encode(tn))
+self.body.append(r'\production{%s}{' % self.encode(tn))
 else:
-self.body.append('\\productioncont{')
+self.body.append(r'\productioncont{')

 def depart_production(self, node: Element) -> None:
 self.body.append('}' + CR)
@@ -681,7 +679,7 @@ class LaTeXTranslator(SphinxTranslator):
 logger.warning(__('encountered title node not in section, topic, table, '
 'admonition or sidebar'),
 location=node)
-self.body.append('\\sphinxstyleothertitle{')
+self.body.append(r'\sphinxstyleothertitle{')
 self.context.append('}' + CR)
 self.in_title = 1

@@ -694,7 +692,7 @@ class LaTeXTranslator(SphinxTranslator):

 def visit_subtitle(self, node: Element) -> None:
 if isinstance(node.parent, nodes.sidebar):
-self.body.append('\\sphinxstylesidebarsubtitle{')
+self.body.append(r'\sphinxstylesidebarsubtitle{')
 self.context.append('}' + CR)
 else:
 self.context.append('')
@@ -705,18 +703,18 @@ class LaTeXTranslator(SphinxTranslator):
 def visit_desc(self, node: Element) -> None:
 if self.config.latex_show_urls == 'footnote':
 self.body.append(BLANKLINE)
-self.body.append('\\begin{savenotes}\\begin{fulllineitems}' + CR)
+self.body.append(r'\begin{savenotes}\begin{fulllineitems}' + CR)
 else:
 self.body.append(BLANKLINE)
-self.body.append('\\begin{fulllineitems}' + CR)
+self.body.append(r'\begin{fulllineitems}' + CR)
 if self.table:
 self.table.has_problematic = True

 def depart_desc(self, node: Element) -> None:
 if self.config.latex_show_urls == 'footnote':
-self.body.append(CR + '\\end{fulllineitems}\\end{savenotes}' + BLANKLINE)
+self.body.append(CR + r'\end{fulllineitems}\end{savenotes}' + BLANKLINE)
 else:
-self.body.append(CR + '\\end{fulllineitems}' + BLANKLINE)
+self.body.append(CR + r'\end{fulllineitems}' + BLANKLINE)

 def _visit_signature_line(self, node: Element) -> None:
 for child in node:
@@ -739,14 +737,14 @@ class LaTeXTranslator(SphinxTranslator):
 self._visit_signature_line(node)
 else:
 self.body.append('%' + CR)
-self.body.append('\\pysigstartmultiline' + CR)
+self.body.append(r'\pysigstartmultiline' + CR)

 def depart_desc_signature(self, node: Element) -> None:
 if not node.get('is_multiline'):
 self._depart_signature_line(node)
 else:
 self.body.append('%' + CR)
-self.body.append('\\pysigstopmultiline')
+self.body.append(r'\pysigstopmultiline')

 def visit_desc_signature_line(self, node: Element) -> None:
 self._visit_signature_line(node)
@@ -825,8 +823,8 @@ class LaTeXTranslator(SphinxTranslator):

 def visit_seealso(self, node: Element) -> None:
 self.body.append(BLANKLINE)
-self.body.append('\\sphinxstrong{%s:}' % admonitionlabels['seealso'] + CR)
-self.body.append('\\nopagebreak' + BLANKLINE)
+self.body.append(r'\sphinxstrong{%s:}' % admonitionlabels['seealso'] + CR)
+self.body.append(r'\nopagebreak' + BLANKLINE)

 def depart_seealso(self, node: Element) -> None:
 self.body.append(BLANKLINE)
@@ -834,7 +832,7 @@ class LaTeXTranslator(SphinxTranslator):
 def visit_rubric(self, node: Element) -> None:
 if len(node) == 1 and node.astext() in ('Footnotes', _('Footnotes')):
 raise nodes.SkipNode
-self.body.append('\\subsubsection*{')
+self.body.append(r'\subsubsection*{')
 self.context.append('}' + CR)
 self.in_title = 1

@ -846,23 +844,23 @@ class LaTeXTranslator(SphinxTranslator):
|
|||||||
self.in_footnote += 1
|
self.in_footnote += 1
|
||||||
label = cast(nodes.label, node[0])
|
label = cast(nodes.label, node[0])
|
||||||
if 'auto' not in node:
|
if 'auto' not in node:
|
||||||
self.body.append('\\sphinxstepexplicit ')
|
self.body.append(r'\sphinxstepexplicit ')
|
||||||
if self.in_parsed_literal:
|
if self.in_parsed_literal:
|
||||||
self.body.append('\\begin{footnote}[%s]' % label.astext())
|
self.body.append(r'\begin{footnote}[%s]' % label.astext())
|
||||||
else:
|
else:
|
||||||
self.body.append('%' + CR)
|
self.body.append('%' + CR)
|
||||||
self.body.append('\\begin{footnote}[%s]' % label.astext())
|
self.body.append(r'\begin{footnote}[%s]' % label.astext())
|
||||||
if 'auto' not in node:
|
if 'auto' not in node:
|
||||||
self.body.append('\\phantomsection'
|
self.body.append(r'\phantomsection'
|
||||||
'\\label{\\thesphinxscope.%s}%%' % label.astext() + CR)
|
r'\label{\thesphinxscope.%s}%%' % label.astext() + CR)
|
||||||
self.body.append('\\sphinxAtStartFootnote' + CR)
|
self.body.append(r'\sphinxAtStartFootnote' + CR)
|
||||||
|
|
||||||
def depart_footnote(self, node: Element) -> None:
|
def depart_footnote(self, node: Element) -> None:
|
||||||
if self.in_parsed_literal:
|
if self.in_parsed_literal:
|
||||||
self.body.append('\\end{footnote}')
|
self.body.append(r'\end{footnote}')
|
||||||
else:
|
else:
|
||||||
self.body.append('%' + CR)
|
self.body.append('%' + CR)
|
||||||
self.body.append('\\end{footnote}')
|
self.body.append(r'\end{footnote}')
|
||||||
self.in_footnote -= 1
|
self.in_footnote -= 1
|
||||||
|
|
||||||
def visit_label(self, node: Element) -> None:
|
def visit_label(self, node: Element) -> None:
|
||||||
@@ -950,25 +948,24 @@ class LaTeXTranslator(SphinxTranslator):
                     self.body.append('&')
                 if cell.width == 1:
                     # insert suitable strut for equalizing row heights in given multirow
-                    self.body.append('\\sphinxtablestrut{%d}' % cell.cell_id)
+                    self.body.append(r'\sphinxtablestrut{%d}' % cell.cell_id)
                 else:  # use \multicolumn for wide multirow cell
-                    self.body.append('\\multicolumn{%d}{|l|}'
-                                     '{\\sphinxtablestrut{%d}}' %
+                    self.body.append(r'\multicolumn{%d}{|l|}{\sphinxtablestrut{%d}}' %
                                      (cell.width, cell.cell_id))

     def depart_row(self, node: Element) -> None:
-        self.body.append('\\\\' + CR)
+        self.body.append(r'\\' + CR)
         cells = [self.table.cell(self.table.row, i) for i in range(self.table.colcount)]
         underlined = [cell.row + cell.height == self.table.row + 1 for cell in cells]
         if all(underlined):
-            self.body.append('\\hline')
+            self.body.append(r'\hline')
         else:
             i = 0
             underlined.extend([False])  # sentinel
             while i < len(underlined):
                 if underlined[i] is True:
                     j = underlined[i:].index(False)
-                    self.body.append('\\cline{%d-%d}' % (i + 1, i + j))
+                    self.body.append(r'\cline{%d-%d}' % (i + 1, i + j))
                     i += j
                 i += 1
         self.table.row += 1

@@ -982,22 +979,22 @@ class LaTeXTranslator(SphinxTranslator):
         if cell.width > 1:
             if self.config.latex_use_latex_multicolumn:
                 if self.table.col == 0:
-                    self.body.append('\\multicolumn{%d}{|l|}{%%' % cell.width + CR)
+                    self.body.append(r'\multicolumn{%d}{|l|}{%%' % cell.width + CR)
                 else:
-                    self.body.append('\\multicolumn{%d}{l|}{%%' % cell.width + CR)
+                    self.body.append(r'\multicolumn{%d}{l|}{%%' % cell.width + CR)
                 context = '}%' + CR
             else:
-                self.body.append('\\sphinxstartmulticolumn{%d}%%' % cell.width + CR)
-                context = '\\sphinxstopmulticolumn' + CR
+                self.body.append(r'\sphinxstartmulticolumn{%d}%%' % cell.width + CR)
+                context = r'\sphinxstopmulticolumn' + CR
         if cell.height > 1:
             # \sphinxmultirow 2nd arg "cell_id" will serve as id for LaTeX macros as well
-            self.body.append('\\sphinxmultirow{%d}{%d}{%%' % (cell.height, cell.cell_id) + CR)
+            self.body.append(r'\sphinxmultirow{%d}{%d}{%%' % (cell.height, cell.cell_id) + CR)
             context = '}%' + CR + context
         if cell.width > 1 or cell.height > 1:
-            self.body.append('\\begin{varwidth}[t]{\\sphinxcolwidth{%d}{%d}}'
+            self.body.append(r'\begin{varwidth}[t]{\sphinxcolwidth{%d}{%d}}'
                              % (cell.width, self.table.colcount) + CR)
-            context = ('\\par' + CR + '\\vskip-\\baselineskip'
-                       '\\vbox{\\hbox{\\strut}}\\end{varwidth}%' + CR + context)
+            context = (r'\par' + CR + r'\vskip-\baselineskip'
+                       r'\vbox{\hbox{\strut}}\end{varwidth}%' + CR + context)
             self.needs_linetrimming = 1
         if len(node.traverse(nodes.paragraph)) >= 2:
             self.table.has_oldproblematic = True
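The only structural edit in the two table hunks above is folding the two-part multicolumn literal into a single raw string; the LaTeX that reaches the output file is unchanged. A quick plain-Python check (not part of the diff; the values 2 and 5 are arbitrary):

    # Adjacent string literals concatenate before the % operator is applied,
    # so the old two-line spelling and the new raw string are the same value.
    old = ('\\multicolumn{%d}{|l|}'
           '{\\sphinxtablestrut{%d}}' % (2, 5))
    new = r'\multicolumn{%d}{|l|}{\sphinxtablestrut{%d}}' % (2, 5)
    assert old == new == '\\multicolumn{2}{|l|}{\\sphinxtablestrut{5}}'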
@@ -1005,7 +1002,7 @@ class LaTeXTranslator(SphinxTranslator):
             if len(node) == 1 and isinstance(node[0], nodes.paragraph) and node.astext() == '':
                 pass
             else:
-                self.body.append('\\sphinxstyletheadfamily ')
+                self.body.append(r'\sphinxstyletheadfamily ')
         if self.needs_linetrimming:
             self.pushbody([])
         self.context.append(context)

@@ -1036,11 +1033,10 @@ class LaTeXTranslator(SphinxTranslator):
                 if nextcell.width == 1:
                     # insert suitable strut for equalizing row heights in multirow
                     # they also serve to clear colour panels which would hide the text
-                    self.body.append('\\sphinxtablestrut{%d}' % nextcell.cell_id)
+                    self.body.append(r'\sphinxtablestrut{%d}' % nextcell.cell_id)
                 else:
                     # use \multicolumn for wide multirow cell
-                    self.body.append('\\multicolumn{%d}{l|}'
-                                     '{\\sphinxtablestrut{%d}}' %
+                    self.body.append(r'\multicolumn{%d}{l|}{\sphinxtablestrut{%d}}' %
                                      (nextcell.width, nextcell.cell_id))

     def visit_acks(self, node: Element) -> None:

@@ -1055,13 +1051,13 @@ class LaTeXTranslator(SphinxTranslator):

     def visit_bullet_list(self, node: Element) -> None:
         if not self.compact_list:
-            self.body.append('\\begin{itemize}' + CR)
+            self.body.append(r'\begin{itemize}' + CR)
         if self.table:
             self.table.has_problematic = True

     def depart_bullet_list(self, node: Element) -> None:
         if not self.compact_list:
-            self.body.append('\\end{itemize}' + CR)
+            self.body.append(r'\end{itemize}' + CR)

     def visit_enumerated_list(self, node: Element) -> None:
         def get_enumtype(node: Element) -> str:
@@ -1086,16 +1082,16 @@ class LaTeXTranslator(SphinxTranslator):
         prefix = node.get('prefix', '')
         suffix = node.get('suffix', '.')

-        self.body.append('\\begin{enumerate}' + CR)
-        self.body.append('\\sphinxsetlistlabels{%s}{%s}{%s}{%s}{%s}%%' %
+        self.body.append(r'\begin{enumerate}' + CR)
+        self.body.append(r'\sphinxsetlistlabels{%s}{%s}{%s}{%s}{%s}%%' %
                          (style, enum, enumnext, prefix, suffix) + CR)
         if 'start' in node:
-            self.body.append('\\setcounter{%s}{%d}' % (enum, node['start'] - 1) + CR)
+            self.body.append(r'\setcounter{%s}{%d}' % (enum, node['start'] - 1) + CR)
         if self.table:
             self.table.has_problematic = True

     def depart_enumerated_list(self, node: Element) -> None:
-        self.body.append('\\end{enumerate}' + CR)
+        self.body.append(r'\end{enumerate}' + CR)

     def visit_list_item(self, node: Element) -> None:
         # Append "{}" in case the next character is "[", which would break

@@ -1106,12 +1102,12 @@ class LaTeXTranslator(SphinxTranslator):
         self.body.append(CR)

     def visit_definition_list(self, node: Element) -> None:
-        self.body.append('\\begin{description}' + CR)
+        self.body.append(r'\begin{description}' + CR)
         if self.table:
             self.table.has_problematic = True

     def depart_definition_list(self, node: Element) -> None:
-        self.body.append('\\end{description}' + CR)
+        self.body.append(r'\end{description}' + CR)

     def visit_definition_list_item(self, node: Element) -> None:
         pass

@@ -1123,11 +1119,11 @@ class LaTeXTranslator(SphinxTranslator):
         self.in_term += 1
         ctx = ''
         if node.get('ids'):
-            ctx = '\\phantomsection'
+            ctx = r'\phantomsection'
             for node_id in node['ids']:
                 ctx += self.hypertarget(node_id, anchor=False)
-        ctx += '}] \\leavevmode'
-        self.body.append('\\item[{')
+        ctx += r'}] \leavevmode'
+        self.body.append(r'\item[{')
         self.context.append(ctx)

     def depart_term(self, node: Element) -> None:

@@ -1147,12 +1143,12 @@ class LaTeXTranslator(SphinxTranslator):
         self.body.append(CR)

     def visit_field_list(self, node: Element) -> None:
-        self.body.append('\\begin{quote}\\begin{description}' + CR)
+        self.body.append(r'\begin{quote}\begin{description}' + CR)
         if self.table:
             self.table.has_problematic = True

     def depart_field_list(self, node: Element) -> None:
-        self.body.append('\\end{description}\\end{quote}' + CR)
+        self.body.append(r'\end{description}\end{quote}' + CR)

     def visit_field(self, node: Element) -> None:
         pass
@@ -1172,7 +1168,7 @@ class LaTeXTranslator(SphinxTranslator):
                 not isinstance(node.parent[index - 1], nodes.paragraph) and
                 not isinstance(node.parent[index - 1], nodes.compound)):
             # insert blank line, if the paragraph follows a non-paragraph node in a compound
-            self.body.append('\\noindent' + CR)
+            self.body.append(r'\noindent' + CR)
         elif index == 1 and isinstance(node.parent, (nodes.footnote, footnotetext)):
             # don't insert blank line, if the paragraph is second child of a footnote
             # (first one is label node)

@@ -1181,33 +1177,33 @@ class LaTeXTranslator(SphinxTranslator):
             # the \sphinxAtStartPar is to allow hyphenation of first word of
             # a paragraph in narrow contexts such as in a table cell
             # added as two items (cf. line trimming in depart_entry())
-            self.body.extend([CR, '\\sphinxAtStartPar' + CR])
+            self.body.extend([CR, r'\sphinxAtStartPar' + CR])

     def depart_paragraph(self, node: Element) -> None:
         self.body.append(CR)

     def visit_centered(self, node: Element) -> None:
-        self.body.append(CR + '\\begin{center}')
+        self.body.append(CR + r'\begin{center}')
         if self.table:
             self.table.has_problematic = True

     def depart_centered(self, node: Element) -> None:
-        self.body.append(CR + '\\end{center}')
+        self.body.append(CR + r'\end{center}')

     def visit_hlist(self, node: Element) -> None:
         self.compact_list += 1
         ncolumns = node['ncolumns']
         if self.compact_list > 1:
-            self.body.append('\\setlength{\\multicolsep}{0pt}' + CR)
-        self.body.append('\\begin{multicols}{' + ncolumns + '}\\raggedright' + CR)
-        self.body.append('\\begin{itemize}\\setlength{\\itemsep}{0pt}'
-                         '\\setlength{\\parskip}{0pt}' + CR)
+            self.body.append(r'\setlength{\multicolsep}{0pt}' + CR)
+        self.body.append(r'\begin{multicols}{' + ncolumns + r'}\raggedright' + CR)
+        self.body.append(r'\begin{itemize}\setlength{\itemsep}{0pt}'
+                         r'\setlength{\parskip}{0pt}' + CR)
         if self.table:
             self.table.has_problematic = True

     def depart_hlist(self, node: Element) -> None:
         self.compact_list -= 1
-        self.body.append('\\end{itemize}\\raggedcolumns\\end{multicols}' + CR)
+        self.body.append(r'\end{itemize}\raggedcolumns\end{multicols}' + CR)

     def visit_hlistcol(self, node: Element) -> None:
         pass
@@ -1217,7 +1213,7 @@ class LaTeXTranslator(SphinxTranslator):
         # some testing with long items showed that columns may be too uneven.
         # And in case only of short items, the automatic column breaks should
         # match the ones pre-computed by the hlist() directive.
-        # self.body.append('\\columnbreak\n')
+        # self.body.append(r'\columnbreak\n')
         pass

     def latex_image_length(self, width_str: str, scale: int = 100) -> str:

@@ -1265,14 +1261,14 @@ class LaTeXTranslator(SphinxTranslator):
         align_prepost = {
             # By default latex aligns the top of an image.
             (1, 'top'): ('', ''),
-            (1, 'middle'): ('\\raisebox{-0.5\\height}{', '}'),
-            (1, 'bottom'): ('\\raisebox{-\\height}{', '}'),
-            (0, 'center'): ('{\\hspace*{\\fill}', '\\hspace*{\\fill}}'),
+            (1, 'middle'): (r'\raisebox{-0.5\height}{', '}'),
+            (1, 'bottom'): (r'\raisebox{-\height}{', '}'),
+            (0, 'center'): (r'{\hspace*{\fill}', r'\hspace*{\fill}}'),
             # These 2 don't exactly do the right thing. The image should
             # be floated alongside the paragraph. See
             # https://www.w3.org/TR/html4/struct/objects.html#adef-align-IMG
-            (0, 'left'): ('{', '\\hspace*{\\fill}}'),
-            (0, 'right'): ('{\\hspace*{\\fill}', '}'),
+            (0, 'left'): ('{', r'\hspace*{\fill}}'),
+            (0, 'right'): (r'{\hspace*{\fill}', '}'),
         }
         try:
             pre.append(align_prepost[is_inline, node['align']][0])

@@ -1280,10 +1276,10 @@ class LaTeXTranslator(SphinxTranslator):
         except KeyError:
             pass
         if self.in_parsed_literal:
-            pre.append('{\\sphinxunactivateextrasandspace ')
+            pre.append(r'{\sphinxunactivateextrasandspace ')
             post.append('}')
         if not is_inline and not has_hyperlink:
-            pre.append(CR + '\\noindent')
+            pre.append(CR + r'\noindent')
             post.append(CR)
         pre.reverse()
         if node['uri'] in self.builder.images:

@@ -1304,10 +1300,10 @@ class LaTeXTranslator(SphinxTranslator):
         if self.in_title and base:
             # Lowercase tokens forcely because some fncychap themes capitalize
             # the options of \sphinxincludegraphics unexpectly (ex. WIDTH=...).
-            self.body.append('\\lowercase{\\sphinxincludegraphics%s}{{%s}%s}' %
+            self.body.append(r'\lowercase{\sphinxincludegraphics%s}{{%s}%s}' %
                              (options, base, ext))
         else:
-            self.body.append('\\sphinxincludegraphics%s{{%s}%s}' %
+            self.body.append(r'\sphinxincludegraphics%s{{%s}%s}' %
                              (options, base, ext))
         self.body.extend(post)

@@ -1323,14 +1319,14 @@ class LaTeXTranslator(SphinxTranslator):
             if 'width' in node:
                 length = self.latex_image_length(node['width'])
                 if length:
-                    self.body.append('\\begin{sphinxfigure-in-table}[%s]' % length + CR)
-                    self.body.append('\\centering' + CR)
+                    self.body.append(r'\begin{sphinxfigure-in-table}[%s]' % length + CR)
+                    self.body.append(r'\centering' + CR)
             else:
-                self.body.append('\\begin{sphinxfigure-in-table}' + CR)
-                self.body.append('\\centering' + CR)
+                self.body.append(r'\begin{sphinxfigure-in-table}' + CR)
+                self.body.append(r'\centering' + CR)
             if any(isinstance(child, nodes.caption) for child in node):
-                self.body.append('\\capstart')
-            self.context.append('\\end{sphinxfigure-in-table}\\relax' + CR)
+                self.body.append(r'\capstart')
+            self.context.append(r'\end{sphinxfigure-in-table}\relax' + CR)
         elif node.get('align', '') in ('left', 'right'):
             length = None
             if 'width' in node:

@@ -1339,19 +1335,19 @@ class LaTeXTranslator(SphinxTranslator):
                 length = self.latex_image_length(node[0]['width'])
             self.body.append(BLANKLINE)     # Insert a blank line to prevent infinite loop
                                             # https://github.com/sphinx-doc/sphinx/issues/7059
-            self.body.append('\\begin{wrapfigure}{%s}{%s}' %
+            self.body.append(r'\begin{wrapfigure}{%s}{%s}' %
                              ('r' if node['align'] == 'right' else 'l', length or '0pt') + CR)
-            self.body.append('\\centering')
-            self.context.append('\\end{wrapfigure}' + CR)
+            self.body.append(r'\centering')
+            self.context.append(r'\end{wrapfigure}' + CR)
         elif self.in_minipage:
-            self.body.append(CR + '\\begin{center}')
-            self.context.append('\\end{center}' + CR)
+            self.body.append(CR + r'\begin{center}')
+            self.context.append(r'\end{center}' + CR)
         else:
-            self.body.append(CR + '\\begin{figure}[%s]' % align + CR)
-            self.body.append('\\centering' + CR)
+            self.body.append(CR + r'\begin{figure}[%s]' % align + CR)
+            self.body.append(r'\centering' + CR)
             if any(isinstance(child, nodes.caption) for child in node):
-                self.body.append('\\capstart' + CR)
-            self.context.append('\\end{figure}' + CR)
+                self.body.append(r'\capstart' + CR)
+            self.context.append(r'\end{figure}' + CR)

     def depart_figure(self, node: Element) -> None:
         self.body.append(self.context.pop())

@@ -1359,13 +1355,13 @@ class LaTeXTranslator(SphinxTranslator):
     def visit_caption(self, node: Element) -> None:
         self.in_caption += 1
         if isinstance(node.parent, captioned_literal_block):
-            self.body.append('\\sphinxSetupCaptionForVerbatim{')
+            self.body.append(r'\sphinxSetupCaptionForVerbatim{')
         elif self.in_minipage and isinstance(node.parent, nodes.figure):
-            self.body.append('\\captionof{figure}{')
+            self.body.append(r'\captionof{figure}{')
         elif self.table and node.parent.tagname == 'figure':
-            self.body.append('\\sphinxfigcaption{')
+            self.body.append(r'\sphinxfigcaption{')
         else:
-            self.body.append('\\caption{')
+            self.body.append(r'\caption{')

     def depart_caption(self, node: Element) -> None:
         self.body.append('}')
@@ -1375,27 +1371,27 @@ class LaTeXTranslator(SphinxTranslator):
         self.in_caption -= 1

     def visit_legend(self, node: Element) -> None:
-        self.body.append(CR + '\\begin{sphinxlegend}')
+        self.body.append(CR + r'\begin{sphinxlegend}')

     def depart_legend(self, node: Element) -> None:
-        self.body.append('\\end{sphinxlegend}' + CR)
+        self.body.append(r'\end{sphinxlegend}' + CR)

     def visit_admonition(self, node: Element) -> None:
-        self.body.append(CR + '\\begin{sphinxadmonition}{note}')
+        self.body.append(CR + r'\begin{sphinxadmonition}{note}')
         self.no_latex_floats += 1

     def depart_admonition(self, node: Element) -> None:
-        self.body.append('\\end{sphinxadmonition}' + CR)
+        self.body.append(r'\end{sphinxadmonition}' + CR)
         self.no_latex_floats -= 1

     def _visit_named_admonition(self, node: Element) -> None:
         label = admonitionlabels[node.tagname]
-        self.body.append(CR + '\\begin{sphinxadmonition}{%s}{%s:}' %
+        self.body.append(CR + r'\begin{sphinxadmonition}{%s}{%s:}' %
                          (node.tagname, label))
         self.no_latex_floats += 1

     def _depart_named_admonition(self, node: Element) -> None:
-        self.body.append('\\end{sphinxadmonition}' + CR)
+        self.body.append(r'\end{sphinxadmonition}' + CR)
         self.no_latex_floats -= 1

     visit_attention = _visit_named_admonition

@@ -1473,11 +1469,11 @@ class LaTeXTranslator(SphinxTranslator):
         pass

     def visit_attribution(self, node: Element) -> None:
-        self.body.append(CR + '\\begin{flushright}' + CR)
+        self.body.append(CR + r'\begin{flushright}' + CR)
         self.body.append('---')

     def depart_attribution(self, node: Element) -> None:
-        self.body.append(CR + '\\end{flushright}' + CR)
+        self.body.append(CR + r'\end{flushright}' + CR)

     def visit_index(self, node: Element) -> None:
         def escape(value: str) -> str:

@@ -1495,7 +1491,7 @@ class LaTeXTranslator(SphinxTranslator):
             if match:
                 return match.expand(r'\\spxentry{\1}\\spxextra{\2}')
             else:
-                return '\\spxentry{%s}' % string
+                return r'\spxentry{%s}' % string

         if not node.get('inline', True):
             self.body.append(CR)

@@ -1542,7 +1538,7 @@ class LaTeXTranslator(SphinxTranslator):
         except ValueError as err:
             logger.warning(str(err))
         if not node.get('inline', True):
-            self.body.append('\\ignorespaces ')
+            self.body.append(r'\ignorespaces ')
         raise nodes.SkipNode

     def visit_raw(self, node: Element) -> None:
@@ -1602,12 +1598,12 @@ class LaTeXTranslator(SphinxTranslator):
         else:
             if len(node) == 1 and uri == node[0]:
                 if node.get('nolinkurl'):
-                    self.body.append('\\sphinxnolinkurl{%s}' % self.encode_uri(uri))
+                    self.body.append(r'\sphinxnolinkurl{%s}' % self.encode_uri(uri))
                 else:
-                    self.body.append('\\sphinxurl{%s}' % self.encode_uri(uri))
+                    self.body.append(r'\sphinxurl{%s}' % self.encode_uri(uri))
                 raise nodes.SkipNode
             else:
-                self.body.append('\\sphinxhref{%s}{' % self.encode_uri(uri))
+                self.body.append(r'\sphinxhref{%s}{' % self.encode_uri(uri))
                 self.context.append('}')

     def depart_reference(self, node: Element) -> None:

@@ -1621,16 +1617,16 @@ class LaTeXTranslator(SphinxTranslator):
         else:
             id = node.get('refuri', '')[1:].replace('#', ':')

-        title = self.escape(node.get('title', '%s')).replace('\\%s', '%s')
-        if '\\{name\\}' in title or '\\{number\\}' in title:
+        title = self.escape(node.get('title', '%s')).replace(r'\%s', '%s')
+        if r'\{name\}' in title or r'\{number\}' in title:
             # new style format (cf. "Fig.%{number}")
-            title = title.replace('\\{name\\}', '{name}').replace('\\{number\\}', '{number}')
-            text = escape_abbr(title).format(name='\\nameref{%s}' % self.idescape(id),
-                                             number='\\ref{%s}' % self.idescape(id))
+            title = title.replace(r'\{name\}', '{name}').replace(r'\{number\}', '{number}')
+            text = escape_abbr(title).format(name=r'\nameref{%s}' % self.idescape(id),
+                                             number=r'\ref{%s}' % self.idescape(id))
         else:
             # old style format (cf. "Fig.%{number}")
-            text = escape_abbr(title) % ('\\ref{%s}' % self.idescape(id))
-        hyperref = '\\hyperref[%s]{%s}' % (self.idescape(id), text)
+            text = escape_abbr(title) % (r'\ref{%s}' % self.idescape(id))
+        hyperref = r'\hyperref[%s]{%s}' % (self.idescape(id), text)
         self.body.append(hyperref)

         raise nodes.SkipNode

@@ -1704,15 +1700,15 @@ class LaTeXTranslator(SphinxTranslator):
             # adjust max width of citation labels not to break the layout
             longest_label = longest_label[:MAX_CITATION_LABEL_LENGTH]

-        self.body.append(CR + '\\begin{sphinxthebibliography}{%s}' %
+        self.body.append(CR + r'\begin{sphinxthebibliography}{%s}' %
                          self.encode(longest_label) + CR)

     def depart_thebibliography(self, node: Element) -> None:
-        self.body.append('\\end{sphinxthebibliography}' + CR)
+        self.body.append(r'\end{sphinxthebibliography}' + CR)

     def visit_citation(self, node: Element) -> None:
         label = cast(nodes.label, node[0])
-        self.body.append('\\bibitem[%s]{%s:%s}' % (self.encode(label.astext()),
+        self.body.append(r'\bibitem[%s]{%s:%s}' % (self.encode(label.astext()),
                                                    node['docname'], node['ids'][0]))

     def depart_citation(self, node: Element) -> None:
@@ -1722,7 +1718,7 @@ class LaTeXTranslator(SphinxTranslator):
         if self.in_title:
             pass
         else:
-            self.body.append('\\sphinxcite{%s:%s}' % (node['docname'], node['refname']))
+            self.body.append(r'\sphinxcite{%s:%s}' % (node['docname'], node['refname']))
             raise nodes.SkipNode

     def depart_citation_reference(self, node: Element) -> None:

@@ -1743,7 +1739,7 @@ class LaTeXTranslator(SphinxTranslator):
         raise nodes.SkipNode

     def visit_footnotemark(self, node: Element) -> None:
-        self.body.append('\\sphinxfootnotemark[')
+        self.body.append(r'\sphinxfootnotemark[')

     def depart_footnotemark(self, node: Element) -> None:
         self.body.append(']')

@@ -1751,15 +1747,15 @@ class LaTeXTranslator(SphinxTranslator):
     def visit_footnotetext(self, node: Element) -> None:
         label = cast(nodes.label, node[0])
         self.body.append('%' + CR)
-        self.body.append('\\begin{footnotetext}[%s]'
-                         '\\phantomsection\\label{\\thesphinxscope.%s}%%'
+        self.body.append(r'\begin{footnotetext}[%s]'
+                         r'\phantomsection\label{\thesphinxscope.%s}%%'
                          % (label.astext(), label.astext()) + CR)
-        self.body.append('\\sphinxAtStartFootnote' + CR)
+        self.body.append(r'\sphinxAtStartFootnote' + CR)

     def depart_footnotetext(self, node: Element) -> None:
         # the \ignorespaces in particular for after table header use
         self.body.append('%' + CR)
-        self.body.append('\\end{footnotetext}\\ignorespaces ')
+        self.body.append(r'\end{footnotetext}\ignorespaces ')

     def visit_captioned_literal_block(self, node: Element) -> None:
         pass

@@ -1771,13 +1767,13 @@ class LaTeXTranslator(SphinxTranslator):
         if node.rawsource != node.astext():
             # most probably a parsed-literal block -- don't highlight
             self.in_parsed_literal += 1
-            self.body.append('\\begin{sphinxalltt}' + CR)
+            self.body.append(r'\begin{sphinxalltt}' + CR)
         else:
             labels = self.hypertarget_to(node)
             if isinstance(node.parent, captioned_literal_block):
                 labels += self.hypertarget_to(node.parent)
             if labels and not self.in_footnote:
-                self.body.append(CR + '\\def\\sphinxLiteralBlockLabel{' + labels + '}')
+                self.body.append(CR + r'\def\sphinxLiteralBlockLabel{' + labels + '}')

             lang = node.get('language', 'default')
             linenos = node.get('linenos', False)
@@ -1790,57 +1786,57 @@ class LaTeXTranslator(SphinxTranslator):
                 location=node, **highlight_args
             )
             if self.in_footnote:
-                self.body.append(CR + '\\sphinxSetupCodeBlockInFootnote')
-                hlcode = hlcode.replace('\\begin{Verbatim}',
-                                        '\\begin{sphinxVerbatim}')
+                self.body.append(CR + r'\sphinxSetupCodeBlockInFootnote')
+                hlcode = hlcode.replace(r'\begin{Verbatim}',
+                                        r'\begin{sphinxVerbatim}')
             # if in table raise verbatim flag to avoid "tabulary" environment
             # and opt for sphinxVerbatimintable to handle caption & long lines
             elif self.table:
                 self.table.has_problematic = True
                 self.table.has_verbatim = True
-                hlcode = hlcode.replace('\\begin{Verbatim}',
-                                        '\\begin{sphinxVerbatimintable}')
+                hlcode = hlcode.replace(r'\begin{Verbatim}',
+                                        r'\begin{sphinxVerbatimintable}')
             else:
-                hlcode = hlcode.replace('\\begin{Verbatim}',
-                                        '\\begin{sphinxVerbatim}')
+                hlcode = hlcode.replace(r'\begin{Verbatim}',
+                                        r'\begin{sphinxVerbatim}')
             # get consistent trailer
             hlcode = hlcode.rstrip()[:-14]  # strip \end{Verbatim}
             if self.table and not self.in_footnote:
-                hlcode += '\\end{sphinxVerbatimintable}'
+                hlcode += r'\end{sphinxVerbatimintable}'
             else:
-                hlcode += '\\end{sphinxVerbatim}'
+                hlcode += r'\end{sphinxVerbatim}'

             hllines = str(highlight_args.get('hl_lines', []))[1:-1]
             if hllines:
-                self.body.append(CR + '\\fvset{hllines={, %s,}}%%' % hllines)
+                self.body.append(CR + r'\fvset{hllines={, %s,}}%%' % hllines)
             self.body.append(CR + hlcode + CR)
             if hllines:
-                self.body.append('\\sphinxresetverbatimhllines' + CR)
+                self.body.append(r'\sphinxresetverbatimhllines' + CR)
             raise nodes.SkipNode

     def depart_literal_block(self, node: Element) -> None:
-        self.body.append(CR + '\\end{sphinxalltt}' + CR)
+        self.body.append(CR + r'\end{sphinxalltt}' + CR)
         self.in_parsed_literal -= 1
     visit_doctest_block = visit_literal_block
     depart_doctest_block = depart_literal_block

     def visit_line(self, node: Element) -> None:
-        self.body.append('\\item[] ')
+        self.body.append(r'\item[] ')

     def depart_line(self, node: Element) -> None:
         self.body.append(CR)

     def visit_line_block(self, node: Element) -> None:
         if isinstance(node.parent, nodes.line_block):
-            self.body.append('\\item[]' + CR)
-            self.body.append('\\begin{DUlineblock}{\\DUlineblockindent}' + CR)
+            self.body.append(r'\item[]' + CR)
+            self.body.append(r'\begin{DUlineblock}{\DUlineblockindent}' + CR)
         else:
-            self.body.append(CR + '\\begin{DUlineblock}{0em}' + CR)
+            self.body.append(CR + r'\begin{DUlineblock}{0em}' + CR)
         if self.table:
             self.table.has_problematic = True

     def depart_line_block(self, node: Element) -> None:
-        self.body.append('\\end{DUlineblock}' + CR)
+        self.body.append(r'\end{DUlineblock}' + CR)

     def visit_block_quote(self, node: Element) -> None:
         # If the block quote contains a single object and that object
@@ -1853,7 +1849,7 @@ class LaTeXTranslator(SphinxTranslator):
                     isinstance(child, nodes.enumerated_list):
                 done = 1
         if not done:
-            self.body.append('\\begin{quote}' + CR)
+            self.body.append(r'\begin{quote}' + CR)
             if self.table:
                 self.table.has_problematic = True

@@ -1865,7 +1861,7 @@ class LaTeXTranslator(SphinxTranslator):
                     isinstance(child, nodes.enumerated_list):
                 done = 1
         if not done:
-            self.body.append('\\end{quote}' + CR)
+            self.body.append(r'\end{quote}' + CR)

     # option node handling copied from docutils' latex writer

@@ -1886,7 +1882,7 @@ class LaTeXTranslator(SphinxTranslator):
         pass

     def visit_option_group(self, node: Element) -> None:
-        self.body.append('\\item [')
+        self.body.append(r'\item [')
         # flag for first option
         self.context.append(0)

@@ -1895,12 +1891,12 @@ class LaTeXTranslator(SphinxTranslator):
         self.body.append('] ')

     def visit_option_list(self, node: Element) -> None:
-        self.body.append('\\begin{optionlist}{3cm}' + CR)
+        self.body.append(r'\begin{optionlist}{3cm}' + CR)
         if self.table:
             self.table.has_problematic = True

     def depart_option_list(self, node: Element) -> None:
-        self.body.append('\\end{optionlist}' + CR)
+        self.body.append(r'\end{optionlist}' + CR)

     def visit_option_list_item(self, node: Element) -> None:
         pass

@@ -1920,13 +1916,13 @@ class LaTeXTranslator(SphinxTranslator):
         pass

     def visit_superscript(self, node: Element) -> None:
-        self.body.append('$^{\\text{')
+        self.body.append(r'$^{\text{')

     def depart_superscript(self, node: Element) -> None:
         self.body.append('}}$')

     def visit_subscript(self, node: Element) -> None:
-        self.body.append('$_{\\text{')
+        self.body.append(r'$_{\text{')

     def depart_subscript(self, node: Element) -> None:
         self.body.append('}}$')
@@ -1993,7 +1989,7 @@ class LaTeXTranslator(SphinxTranslator):
         if self.literal_whitespace:
             # Insert a blank before the newline, to avoid
             # ! LaTeX Error: There's no line here to end.
-            text = text.replace(CR, '~\\\\' + CR).replace(' ', '~')
+            text = text.replace(CR, r'~\\' + CR).replace(' ', '~')
         return text

     def encode_uri(self, text: str) -> str:

@@ -2001,9 +1997,9 @@ class LaTeXTranslator(SphinxTranslator):
         # this must be checked against hyperref package exact dealings
         # mainly, %, #, {, } and \ need escaping via a \ escape
         # in \href, the tilde is allowed and must be represented literally
-        return self.encode(text).replace('\\textasciitilde{}', '~').\
-            replace('\\sphinxhyphen{}', '-').\
-            replace('\\textquotesingle{}', "'")
+        return self.encode(text).replace(r'\textasciitilde{}', '~').\
+            replace(r'\sphinxhyphen{}', '-').\
+            replace(r'\textquotesingle{}', "'")

     def visit_Text(self, node: Text) -> None:
         text = self.encode(node.astext())
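Every change in the writer hunks above is the same mechanical rewrite: a backslash-escaped literal such as '\\caption{' becomes the raw string r'\caption{'. The two spellings are the same string object at runtime, so the generated LaTeX is unchanged; only the Python source becomes easier to read against the macro names. A minimal check in plain Python:

    # Raw strings change only how the source is written, not the value.
    assert '\\pysigstartmultiline' == r'\pysigstartmultiline'
    assert '\\begin{sphinxVerbatim}' == r'\begin{sphinxVerbatim}'
    assert '\\\\' == r'\\' and len(r'\\') == 2   # the row terminator written by depart_row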
tests/roots/test-directive-csv-table/conf.py (new file, 0 lines)

tests/roots/test-directive-csv-table/example.csv (new file, 1 line)
@@ -0,0 +1 @@
+foo,bar,baz

tests/roots/test-directive-csv-table/subdir/example.csv (new file, 1 line)
@@ -0,0 +1 @@
+FOO,BAR,BAZ
@@ -1,3 +1,5 @@
+.. c:namespace:: anon_dup_decl_ns
+
 .. c:struct:: anon_dup_decl

 .. c:struct:: @a.A

@@ -1,3 +1,5 @@
+.. c:namespace:: function_param_target
+
 .. c:function:: void f(int i)

 - :c:var:`i`

@@ -1,3 +1,5 @@
+.. c:namespace:: index
+
 test-domain-c
 =============


@@ -1,10 +0,0 @@
-.. c:member:: int member;
-.. c:var:: int var;
-.. c:function:: void f();
-.. .. c:macro:: NO_SEMICOLON;
-.. c:struct:: Struct;
-.. c:union:: Union;
-.. c:enum:: Enum;
-.. c:enumerator:: Enumerator;
-.. c:type:: Type;
-.. c:type:: int TypeDef;
@@ -0,0 +1 @@
+python_use_unqualified_type_names = True

@@ -0,0 +1,8 @@
+domain-py-smart_reference
+=========================
+
+.. py:class:: Name
+   :module: foo
+
+
+.. py:function:: hello(name: foo.Name, age: foo.Age)
tests/roots/test-ext-autodoc/target/preserve_defaults.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+from datetime import datetime
+from typing import Any
+
+CONSTANT = 'foo'
+SENTINEL = object()
+
+
+def foo(name: str = CONSTANT,
+        sentinal: Any = SENTINEL,
+        now: datetime = datetime.now()) -> None:
+    """docstring"""
+
+
+class Class:
+    """docstring"""
+
+    def meth(self, name: str = CONSTANT, sentinal: Any = SENTINEL,
+             now: datetime = datetime.now()) -> None:
+        """docstring"""
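The new test target above presumably feeds autodoc's handling of default values: CONSTANT, SENTINEL and datetime.now() are all evaluated once at import time, so the runtime default of foo() no longer looks like what the author wrote. A small illustration of the underlying Python behaviour (standard library only; the tie-in to a specific autodoc option is an assumption based on the file name preserve_defaults.py):

    import inspect
    from datetime import datetime

    def foo(now: datetime = datetime.now()) -> None:
        """docstring"""

    # The evaluated default is a frozen timestamp, not the expression,
    # so a documenter that wants to show "datetime.now()" has to read the
    # source text instead of repr()-ing the value.
    print(inspect.signature(foo).parameters['now'].default)  # e.g. 2021-03-20 12:34:56.789012
    print(inspect.getsource(foo).splitlines()[0])            # ...= datetime.now()) -> None: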
@@ -17,6 +17,9 @@ T5 = TypeVar("T5", contravariant=True)
 #: T6
 T6 = NewType("T6", int)

+#: T7
+T7 = TypeVar("T7", bound=int)
+

 class Class:
     #: T1
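The hunk above adds a bound type variable to the autodoc test target. A bound restricts substitutions to subtypes of the bound, which is the piece of information a documenter would now need to render. A short, purely illustrative sketch (only the T7 declaration comes from the diff; clamp is made up):

    from typing import TypeVar

    #: T7
    T7 = TypeVar("T7", bound=int)

    def clamp(value: T7, lo: T7, hi: T7) -> T7:
        # bool or any other int subtype is accepted here; str would be
        # rejected by a type checker because of the bound.
        return max(lo, min(value, hi))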
@@ -1,7 +1,7 @@
 extensions = ['sphinx.ext.doctest']

 project = 'test project for the doctest :skipif: directive'
-master_doc = 'skipif'
+root_doc = 'skipif'
 source_suffix = '.txt'
 exclude_patterns = ['_build']


@@ -1,6 +1,6 @@
 extensions = ['sphinx.ext.doctest']

 project = 'test project for doctest'
-master_doc = 'doctest'
+root_doc = 'doctest'
 source_suffix = '.txt'
 exclude_patterns = ['_build']
@@ -1,2 +1,2 @@
-master_doc = 'equations'
+root_doc = 'equations'
 extensions = ['sphinx.ext.imgmath']

@@ -1,5 +1,3 @@
-master_doc = 'index'
-
 exclude_patterns = ['_build']

 latex_elements = {

@@ -1,4 +1,4 @@
-master_doc = 'links'
+root_doc = 'links'
 source_suffix = '.txt'
 exclude_patterns = ['_build']
 linkcheck_anchors = True
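The conf.py fixtures above consistently switch from master_doc to root_doc; judging from these hunks, root_doc is the new spelling of the option that names the document holding the root toctree (master_doc is presumably kept as an alias, since only test fixtures are renamed here). A typical configuration would now read (project and extensions values are illustrative):

    # conf.py
    project = 'example'
    extensions = ['sphinx.ext.doctest']

    root_doc = 'index'        # previously: master_doc = 'index'
    source_suffix = '.txt'
    exclude_patterns = ['_build']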
@@ -48,8 +48,8 @@ def nonascii_srcdir(request, rootdir, sphinx_test_tempdir):
         =======================
         """))

-    master_doc = srcdir / 'index.txt'
-    master_doc.write_text(master_doc.read_text() + dedent("""
+    root_doc = srcdir / 'index.txt'
+    root_doc.write_text(root_doc.read_text() + dedent("""
         .. toctree::

            %(test_name)s/%(test_name)s

@@ -71,7 +71,7 @@ def test_build_all(requests_head, make_app, nonascii_srcdir, buildername):
     app.build()


-def test_master_doc_not_found(tempdir, make_app):
+def test_root_doc_not_found(tempdir, make_app):
     (tempdir / 'conf.py').write_text('')
     assert tempdir.listdir() == ['conf.py']

|
@ -1037,7 +1037,7 @@ def test_toctree_maxdepth_howto(app, status, warning):
|
|||||||
|
|
||||||
@pytest.mark.sphinx(
|
@pytest.mark.sphinx(
|
||||||
'latex', testroot='toctree-maxdepth',
|
'latex', testroot='toctree-maxdepth',
|
||||||
confoverrides={'master_doc': 'foo'})
|
confoverrides={'root_doc': 'foo'})
|
||||||
def test_toctree_not_found(app, status, warning):
|
def test_toctree_not_found(app, status, warning):
|
||||||
app.builder.build_all()
|
app.builder.build_all()
|
||||||
result = (app.outdir / 'python.tex').read_text()
|
result = (app.outdir / 'python.tex').read_text()
|
||||||
@ -1051,7 +1051,7 @@ def test_toctree_not_found(app, status, warning):
|
|||||||
|
|
||||||
@pytest.mark.sphinx(
|
@pytest.mark.sphinx(
|
||||||
'latex', testroot='toctree-maxdepth',
|
'latex', testroot='toctree-maxdepth',
|
||||||
confoverrides={'master_doc': 'bar'})
|
confoverrides={'root_doc': 'bar'})
|
||||||
def test_toctree_without_maxdepth(app, status, warning):
|
def test_toctree_without_maxdepth(app, status, warning):
|
||||||
app.builder.build_all()
|
app.builder.build_all()
|
||||||
result = (app.outdir / 'python.tex').read_text()
|
result = (app.outdir / 'python.tex').read_text()
|
||||||
@ -1064,7 +1064,7 @@ def test_toctree_without_maxdepth(app, status, warning):
|
|||||||
|
|
||||||
@pytest.mark.sphinx(
|
@pytest.mark.sphinx(
|
||||||
'latex', testroot='toctree-maxdepth',
|
'latex', testroot='toctree-maxdepth',
|
||||||
confoverrides={'master_doc': 'qux'})
|
confoverrides={'root_doc': 'qux'})
|
||||||
def test_toctree_with_deeper_maxdepth(app, status, warning):
|
def test_toctree_with_deeper_maxdepth(app, status, warning):
|
||||||
app.builder.build_all()
|
app.builder.build_all()
|
||||||
result = (app.outdir / 'python.tex').read_text()
|
result = (app.outdir / 'python.tex').read_text()
|
||||||
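The three hunks above only swap the key inside confoverrides; the decorator pattern itself is unchanged. For reference, the full shape of such a test, reassembled from the context lines (the final assertion is illustrative and not taken from the test suite):

    import pytest

    @pytest.mark.sphinx(
        'latex', testroot='toctree-maxdepth',
        confoverrides={'root_doc': 'foo'})
    def test_toctree_not_found(app, status, warning):
        # Build with 'foo' as the root document and inspect the LaTeX output.
        app.builder.build_all()
        result = (app.outdir / 'python.tex').read_text()
        assert r'\begin{document}' in result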
@@ -1532,7 +1532,7 @@ def test_latex_figure_in_admonition(app, status, warning):
 def test_default_latex_documents():
     from sphinx.util import texescape
     texescape.init()
-    config = Config({'master_doc': 'index',
+    config = Config({'root_doc': 'index',
                      'project': 'STASI™ Documentation',
                      'author': "Wolfgang Schäuble & G'Beckstein."})
     config.init_values()
Some files were not shown because too many files have changed in this diff.