mirror of https://github.com/sphinx-doc/sphinx.git
synced 2025-02-25 18:55:22 -06:00

commit 1cae50f748
Merge branch 'stable' into 4a164170-33e9-4df3-aad2-a13af37b6b43

CHANGES (13 lines changed)

@@ -14,6 +14,9 @@ Features added
 --------------
 
 * #4181: autodoc: Sort dictionary keys when possible
+* ``VerbatimHighlightColor`` is a new
+  :ref:`LaTeX 'sphinxsetup' <latexsphinxsetup>` key (refs: #4285)
+* Easier customizability of LaTeX macros involved in rendering of code-blocks
 
 Bugs fixed
 ----------

@@ -23,6 +26,16 @@ Bugs fixed
 * #4221: napoleon depends on autodoc, but users need to load it manually
 * #2298: automodule fails to document a class attribute
 * #4099: C++: properly link class reference to class from inside constructor
+* #4267: PDF build broken by Unicode U+2116 NUMERO SIGN character
+* #4249: PDF output: Pygments error highlighting increases line spacing in
+  code blocks
+* #1238: Support ``:emphasize-lines:`` in PDF output
+* #4279: Sphinx crashes with pickling error when run with multiple processes and
+  remote image
+* #1421: Respect the quiet flag in sphinx-quickstart
+* #4281: Race conditions when creating output directory
+* #4315: For PDF 'howto' documents, ``latex_toplevel_sectioning='part'`` generates
+  ``\chapter`` commands
 * #4214: Two todolist directives break sphinx-1.6.5
 
 Testing

EXAMPLES (1 line changed)

@@ -111,6 +111,7 @@ Documentation using the sphinxdoc theme
 Documentation using another builtin theme
 -----------------------------------------
 
+* Arcade: http://arcade.academy/ (sphinx_rtd_theme)
 * ASE: https://wiki.fysik.dtu.dk/ase/ (sphinx_rtd_theme)
 * C/C++ Development with Eclipse: http://eclipsebook.in/ (agogo)
 * ESWP3 (http://eswp3.org) (sphinx_rtd_theme)

@@ -311,8 +311,8 @@ General configuration
 .. confval:: numfig
 
    If true, figures, tables and code-blocks are automatically numbered if they
-   have a caption. At same time, the `numref` role is enabled. For now, it
-   works only with the HTML builder and LaTeX builder. Default is ``False``.
+   have a caption. The :rst:role:`numref` role is enabled.
+   Obeyed so far only by HTML and LaTeX builders. Default is ``False``.
 
    .. note::
 

@@ -335,10 +335,21 @@ General configuration
 
 .. confval:: numfig_secnum_depth
 
-   The scope of figure numbers, that is, the numfig feature numbers figures
-   in which scope. ``0`` means "whole document". ``1`` means "in a section".
-   Sphinx numbers like x.1, x.2, x.3... ``2`` means "in a subsection". Sphinx
-   numbers like x.x.1, x.x.2, x.x.3..., and so on. Default is ``1``.
+   - if set to ``0``, figures, tables and code-blocks are continuously numbered
+     starting at ``1``.
+   - if ``1`` (default) numbers will be ``x.1``, ``x.2``, ... with ``x``
+     the section number (top level sectioning; no ``x.`` if no section).
+     This naturally applies only if section numbering has been activated via
+     the ``:numbered:`` option of the :rst:dir:`toctree` directive.
+   - ``2`` means that numbers will be ``x.y.1``, ``x.y.2``, ... if located in
+     a sub-section (but still ``x.1``, ``x.2``, ... if located directly under a
+     section and ``1``, ``2``, ... if not in any top level section.)
+   - etc...
+
+   .. note::
+
+      The LaTeX builder currently ignores this configuration setting. It will
+      obey it at Sphinx 1.7.
 
    .. versionadded:: 1.3
 

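A minimal conf.py sketch of the numbering options documented in the two hunks above (values are illustrative, not taken from this commit)::

   # conf.py -- illustrative values
   numfig = True             # number figures, tables and code-blocks that have a caption
   numfig_secnum_depth = 1   # "x.1", "x.2", ... with x the top-level section number
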
@@ -1606,10 +1617,15 @@ These options influence LaTeX output. See further :doc:`latex`.
 .. confval:: latex_toplevel_sectioning
 
    This value determines the topmost sectioning unit. It should be chosen from
-   ``part``, ``chapter`` or ``section``. The default is ``None``; the topmost
-   sectioning unit is switched by documentclass. ``section`` is used if
+   ``'part'``, ``'chapter'`` or ``'section'``. The default is ``None``;
+   the topmost
+   sectioning unit is switched by documentclass: ``section`` is used if
    documentclass will be ``howto``, otherwise ``chapter`` will be used.
 
+   Note that if LaTeX uses ``\part`` command, then the numbering of sectioning
+   units one level deep gets off-sync with HTML numbering, because LaTeX
+   numbers continuously ``\chapter`` (or ``\section`` for ``howto``.)
+
    .. versionadded:: 1.4
 
 .. confval:: latex_appendices

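An illustrative conf.py use of the setting documented above (not part of this commit); with an article-based 'howto' there is no ``\chapter`` level, which is what the added note and bug #4315 are about::

   # conf.py -- illustrative
   latex_toplevel_sectioning = 'part'   # or 'chapter', 'section'; None lets the
                                        # document class decide (howto -> section)
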
@@ -78,7 +78,7 @@ or use Python raw strings (``r"raw"``).
 Normally, equations are not numbered. If you want your equation to get a
 number, use the ``label`` option. When given, it selects an internal label
 for the equation, by which it can be cross-referenced, and causes an equation
-number to be issued. See :rst:role:`eqref` for an example. The numbering
+number to be issued. See :rst:role:`eq` for an example. The numbering
 style depends on the output format.
 
 There is also an option ``nowrap`` that prevents any wrapping of the given

@@ -256,6 +256,16 @@ The available styling options
 ``VerbatimBorderColor``
    default ``{rgb}{0,0,0}``. The frame color, defaults to black.
 
+``VerbatimHighlightColor``
+   default ``{rgb}{0.878,1,1}``. The color for highlighted lines.
+
+   .. versionadded:: 1.6.6
+
+   .. note::
+
+      Starting with this colour key, and for all others coming next, the actual
+      names declared to "color" or "xcolor" are prefixed with "sphinx".
+
 ``verbatimsep``
    default ``\fboxsep``. The separation between code lines and the frame.
 

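The new key is set through the ``'sphinxsetup'`` entry of ``latex_elements``; a hedged conf.py sketch (the colour value is an arbitrary example, not from this commit)::

   # conf.py -- illustrative; needs Sphinx 1.6.6 or later
   latex_elements = {
       'sphinxsetup': 'VerbatimHighlightColor={rgb}{0.90,0.98,0.82}',
   }
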
@@ -277,11 +287,6 @@ The available styling options
    default ``{rgb}{0,0,0}`` (black). The colour for the two horizontal rules
    used by Sphinx in LaTeX for styling a :dudir:`note` type admonition.
 
-   .. note::
-
-      The actual colour names declared to "color" or "xcolor" are prefixed with
-      "sphinx".
-
 ``noteborder``, ``hintborder``, ``importantborder``, ``tipborder``
    default ``0.5pt``. The width of the two horizontal rules.
 

@@ -443,6 +448,11 @@ Environments
   .. versionadded:: 1.5
      options ``verbatimwithframe``, ``verbatimwrapslines``,
      ``verbatimsep``, ``verbatimborder``.
+  .. versionadded:: 1.6.6
+     support for ``:emphasize-lines:`` option
+  .. versionadded:: 1.6.6
+     easier customizability of the formatting via exposed to user LaTeX macros
+     such as ``\sphinxVerbatimHighlightLine``.
 - the bibliography uses ``sphinxthebibliography`` and the Python Module index
   as well as the general index both use ``sphinxtheindex``; these environments
   are wrappers of the ``thebibliography`` and respectively ``theindex``

|
@ -121,6 +121,8 @@ emphasize particular lines::
|
|||||||
.. versionchanged:: 1.3
|
.. versionchanged:: 1.3
|
||||||
``lineno-start`` has been added.
|
``lineno-start`` has been added.
|
||||||
|
|
||||||
|
.. versionchanged:: 1.6.6
|
||||||
|
LaTeX supports the ``emphasize-lines`` option.
|
||||||
|
|
||||||
Includes
|
Includes
|
||||||
^^^^^^^^
|
^^^^^^^^
|
||||||
@@ -188,8 +190,8 @@ Includes
    ``lines``, the first allowed line having by convention the line number ``1``.
 
    When lines have been selected in any of the ways described above, the
-   line numbers in ``emphasize-lines`` also refer to the selection, with the
-   first selected line having number ``1``.
+   line numbers in ``emphasize-lines`` refer to those selected lines, counted
+   consecutively starting at ``1``.
 
    When specifying particular parts of a file to display, it can be useful to
    display the original line numbers. This can be done using the

@@ -222,15 +222,15 @@ Cross-referencing figures by figure number
 reST labels are used. When you use this role, it will insert a reference to
 the figure with link text by its figure number like "Fig. 1.1".
 
-If an explicit link text is given (like usual: ``:numref:`Image of Sphinx (Fig.
-%s) <my-figure>```), the link caption will be the title of the reference.
-As a special character, `%s` and `{number}` will be replaced to figure
-number. `{name}` will be replaced to figure caption.
-If no explicit link text is given, the value of :confval:`numfig_format` is
-used to default value of link text.
+If an explicit link text is given (as usual: ``:numref:`Image of Sphinx (Fig.
+%s) <my-figure>```), the link caption will serve as title of the reference.
+As placeholders, `%s` and `{number}` get replaced by the figure
+number and `{name}` by the figure caption.
+If no explicit link text is given, the :confval:`numfig_format` setting is
+used as fall-back default.
 
-If :confval:`numfig` is ``False``, figures are not numbered.
-so this role inserts not a reference but labels or link text.
+If :confval:`numfig` is ``False``, figures are not numbered,
+so this role inserts not a reference but the label or the link text.
 
 Cross-referencing other items of interest
 -----------------------------------------

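The fall-back link text mentioned above comes from ``numfig_format``; an illustrative conf.py sketch (example strings, not values introduced by this commit)::

   # conf.py -- illustrative
   numfig_format = {
       'figure': 'Fig. %s',
       'table': 'Table %s',
       'code-block': 'Listing %s',
   }
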
@@ -26,7 +26,7 @@ from fnmatch import fnmatch
 from sphinx import __display_version__
 from sphinx.quickstart import EXTENSIONS
 from sphinx.util import rst
-from sphinx.util.osutil import FileAvoidWrite, walk
+from sphinx.util.osutil import FileAvoidWrite, ensuredir, walk
 
 if False:
     # For type annotation

@@ -375,9 +375,8 @@ Note: By default this script will not overwrite already created files.""")
     if not path.isdir(rootpath):
         print('%s is not a directory.' % rootpath, file=sys.stderr)
         sys.exit(1)
-    if not path.isdir(opts.destdir):
-        if not opts.dryrun:
-            os.makedirs(opts.destdir)
+    if not opts.dryrun:
+        ensuredir(opts.destdir)
     rootpath = path.abspath(rootpath)
     excludes = normalize_excludes(rootpath, excludes)
     modules = recurse_tree(rootpath, excludes, opts)

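``ensuredir`` from ``sphinx.util.osutil`` creates the directory (including parents) and tolerates it already existing, which avoids the check-then-create race behind #4281. A rough sketch of the equivalent behaviour, not the actual implementation::

   import errno
   import os

   def ensuredir_sketch(path):
       """Create *path* if needed; ignore a concurrent creator."""
       try:
           os.makedirs(path)
       except OSError as exc:
           if exc.errno != errno.EEXIST or not os.path.isdir(path):
               raise
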
@@ -41,7 +41,7 @@ from sphinx.util import import_object
 from sphinx.util import logging
 from sphinx.util import status_iterator, old_status_iterator, display_chunk
 from sphinx.util.tags import Tags
-from sphinx.util.osutil import ENOENT
+from sphinx.util.osutil import ENOENT, ensuredir
 from sphinx.util.console import bold, darkgreen  # type: ignore
 from sphinx.util.docutils import is_html5_writer_available, directive_helper
 from sphinx.util.i18n import find_catalog_source_files

@@ -160,7 +160,7 @@ class Sphinx(object):
 
         if not path.isdir(outdir):
             logger.info('making output directory...')
-            os.makedirs(outdir)
+            ensuredir(outdir)
 
         # read config
         self.tags = Tags(tags)

|
|||||||
:license: BSD, see LICENSE for details.
|
:license: BSD, see LICENSE for details.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
|
||||||
from os import path
|
from os import path
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
@ -24,7 +23,7 @@ from docutils import nodes
|
|||||||
from sphinx.deprecation import RemovedInSphinx20Warning
|
from sphinx.deprecation import RemovedInSphinx20Warning
|
||||||
from sphinx.environment.adapters.asset import ImageAdapter
|
from sphinx.environment.adapters.asset import ImageAdapter
|
||||||
from sphinx.util import i18n, path_stabilize, logging, status_iterator
|
from sphinx.util import i18n, path_stabilize, logging, status_iterator
|
||||||
from sphinx.util.osutil import SEP, relative_uri
|
from sphinx.util.osutil import SEP, ensuredir, relative_uri
|
||||||
from sphinx.util.i18n import find_catalog
|
from sphinx.util.i18n import find_catalog
|
||||||
from sphinx.util.console import bold # type: ignore
|
from sphinx.util.console import bold # type: ignore
|
||||||
from sphinx.util.parallel import ParallelTasks, SerialTasks, make_chunks, \
|
from sphinx.util.parallel import ParallelTasks, SerialTasks, make_chunks, \
|
||||||
@ -79,8 +78,7 @@ class Builder(object):
|
|||||||
self.confdir = app.confdir
|
self.confdir = app.confdir
|
||||||
self.outdir = app.outdir
|
self.outdir = app.outdir
|
||||||
self.doctreedir = app.doctreedir
|
self.doctreedir = app.doctreedir
|
||||||
if not path.isdir(self.doctreedir):
|
ensuredir(self.doctreedir)
|
||||||
os.makedirs(self.doctreedir)
|
|
||||||
|
|
||||||
self.app = app # type: Sphinx
|
self.app = app # type: Sphinx
|
||||||
self.env = None # type: BuildEnvironment
|
self.env = None # type: BuildEnvironment
|
||||||
|
@@ -274,7 +274,7 @@ class StandaloneHTMLBuilder(Builder):
         # type: () -> Iterator[unicode]
         cfgdict = dict((confval.name, confval.value) for confval in self.config.filter('html'))
         self.config_hash = get_stable_hash(cfgdict)
-        self.tags_hash = get_stable_hash(sorted(self.tags))  # type: ignore
+        self.tags_hash = get_stable_hash(sorted(self.tags))
         old_config_hash = old_tags_hash = ''
         try:
             with open(path.join(self.outdir, '.buildinfo')) as fp:

@@ -246,7 +246,7 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
         olen = len(outdir)
         for root, dirs, files in os.walk(outdir):
             staticdir = root.startswith(path.join(outdir, '_static'))
-            for fn in files:
+            for fn in sorted(files):
                 if (staticdir and not fn.endswith('.js')) or \
                         fn.endswith('.html'):
                     print(path.join(root, fn)[olen:].replace(os.sep, '\\'),

@@ -188,7 +188,7 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
         for root, dirs, files in os.walk(outdir):
             resourcedir = root.startswith(staticdir) or \
                 root.startswith(imagesdir)
-            for fn in files:
+            for fn in sorted(files):
                 if (resourcedir and not fn.endswith('.js')) or \
                         fn.endswith('.html'):
                     filename = path.join(root, fn)[olen:]

@@ -264,7 +264,7 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
             link = node['refuri']
             title = htmlescape(node.astext()).replace('"', '&quot;')
             item = section_template % {'title': title, 'ref': link}
-            item = u' ' * 4 * indentlevel + item  # type: ignore
+            item = u' ' * 4 * indentlevel + item
             parts.append(item.encode('ascii', 'xmlcharrefreplace'))
         elif isinstance(node, nodes.bullet_list):
             for subnode in node:

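``os.walk`` yields file names in an arbitrary, filesystem-dependent order, so iterating over ``sorted(files)`` makes the generated help project files reproducible. A small illustration (the ``dirs.sort()`` line is an extra aside, not part of this commit)::

   import os

   for root, dirs, files in os.walk('.'):
       dirs.sort()               # optional: also fix the traversal order of subdirectories
       for fn in sorted(files):  # deterministic, platform-independent order
           print(os.path.join(root, fn))
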
@@ -288,7 +288,7 @@ class Config(object):
             logger.warning("%s", exc)
         for name in config:
             if name in self.values:
-                self.__dict__[name] = config[name]
+                self.__dict__[name] = config[name]  # type: ignore
         if isinstance(self.source_suffix, string_types):  # type: ignore
             self.source_suffix = [self.source_suffix]  # type: ignore
 

@@ -528,7 +528,7 @@ class ASTBase(UnicodeMixin):
         if type(self) is not type(other):
             return False
         try:
-            for key, value in iteritems(self.__dict__):  # type: ignore
+            for key, value in iteritems(self.__dict__):
                 if value != getattr(other, key):
                     return False
         except AttributeError:

@@ -262,7 +262,7 @@ class TocTreeCollector(EnvironmentCollector):
 
                     continue
 
-                figtype = env.get_domain('std').get_figtype(subnode)  # type: ignore
+                figtype = env.get_domain('std').get_figtype(subnode)
                 if figtype and subnode['ids']:
                     register_fignumber(docname, secnum, figtype, subnode)
 

@@ -194,7 +194,7 @@ class GoogleDocstring(UnicodeMixin):
         line = self._line_iter.peek()
         while(not self._is_section_break() and
               (not line or self._is_indented(line, indent))):
-            lines.append(next(self._line_iter))  # type: ignore
+            lines.append(next(self._line_iter))
             line = self._line_iter.peek()
         return lines
 

@@ -204,7 +204,7 @@ class GoogleDocstring(UnicodeMixin):
         while (self._line_iter.has_next() and
                self._line_iter.peek() and
                not self._is_section_header()):
-            lines.append(next(self._line_iter))  # type: ignore
+            lines.append(next(self._line_iter))
         return lines
 
     def _consume_empty(self):

@@ -212,13 +212,13 @@ class GoogleDocstring(UnicodeMixin):
         lines = []
         line = self._line_iter.peek()
         while self._line_iter.has_next() and not line:
-            lines.append(next(self._line_iter))  # type: ignore
+            lines.append(next(self._line_iter))
             line = self._line_iter.peek()
         return lines
 
     def _consume_field(self, parse_type=True, prefer_type=False):
         # type: (bool, bool) -> Tuple[unicode, unicode, List[unicode]]
-        line = next(self._line_iter)  # type: ignore
+        line = next(self._line_iter)
 
         before, colon, after = self._partition_field_on_colon(line)
         _name, _type, _desc = before, '', after  # type: unicode, unicode, unicode

@@ -250,7 +250,7 @@ class GoogleDocstring(UnicodeMixin):
 
     def _consume_inline_attribute(self):
         # type: () -> Tuple[unicode, List[unicode]]
-        line = next(self._line_iter)  # type: ignore
+        line = next(self._line_iter)
         _type, colon, _desc = self._partition_field_on_colon(line)
         if not colon:
             _type, _desc = _desc, _type

@@ -285,7 +285,7 @@ class GoogleDocstring(UnicodeMixin):
 
     def _consume_section_header(self):
         # type: () -> unicode
-        section = next(self._line_iter)  # type: ignore
+        section = next(self._line_iter)
         stripped_section = section.strip(':')
         if stripped_section.lower() in self._sections:
             section = stripped_section

@@ -295,7 +295,7 @@ class GoogleDocstring(UnicodeMixin):
         # type: () -> List[unicode]
         lines = []
         while self._line_iter.has_next():
-            lines.append(next(self._line_iter))  # type: ignore
+            lines.append(next(self._line_iter))
         return lines
 
     def _consume_to_next_section(self):

@@ -303,7 +303,7 @@ class GoogleDocstring(UnicodeMixin):
         self._consume_empty()
         lines = []
         while not self._is_section_break():
-            lines.append(next(self._line_iter))  # type: ignore
+            lines.append(next(self._line_iter))
         return lines + self._consume_empty()
 
     def _dedent(self, lines, full=False):

@@ -886,7 +886,7 @@ class NumpyDocstring(GoogleDocstring):
 
     def _consume_field(self, parse_type=True, prefer_type=False):
         # type: (bool, bool) -> Tuple[unicode, unicode, List[unicode]]
-        line = next(self._line_iter)  # type: ignore
+        line = next(self._line_iter)
         if parse_type:
             _name, _, _type = self._partition_field_on_colon(line)
         else:

@@ -907,10 +907,10 @@ class NumpyDocstring(GoogleDocstring):
 
     def _consume_section_header(self):
         # type: () -> unicode
-        section = next(self._line_iter)  # type: ignore
+        section = next(self._line_iter)
         if not _directive_regex.match(section):
             # Consume the header underline
-            next(self._line_iter)  # type: ignore
+            next(self._line_iter)
         return section
 
     def _is_section_break(self):

@@ -208,5 +208,5 @@ class NodeVisitor(object):
     def generic_visit(self, node):
         """Called if no explicit visitor function exists for a node."""
         if isinstance(node, Node):
-            for child in node:  # type: ignore
+            for child in node:
                 self.visit(child)

@@ -35,7 +35,7 @@ from six.moves.urllib.parse import quote as urlquote
 from docutils.utils import column_width
 
 from sphinx import __display_version__, package_dir
-from sphinx.util.osutil import make_filename
+from sphinx.util.osutil import ensuredir, make_filename
 from sphinx.util.console import (  # type: ignore
     purple, bold, red, turquoise, nocolor, color_terminal
 )

@@ -69,13 +69,6 @@ EXTENSIONS = ('autodoc', 'doctest', 'intersphinx', 'todo', 'coverage',
 PROMPT_PREFIX = '> '
 
 
-def mkdir_p(dir):
-    # type: (unicode) -> None
-    if path.isdir(dir):
-        return
-    os.makedirs(dir)
-
-
 # function to get input from terminal -- overridden by the test suite
 def term_input(prompt):
     # type: (unicode) -> unicode

@@ -433,11 +426,11 @@ def generate(d, overwrite=True, silent=False, templatedir=None):
         d[key + '_str'] = d[key].replace('\\', '\\\\').replace("'", "\\'")
 
     if not path.isdir(d['path']):
-        mkdir_p(d['path'])
+        ensuredir(d['path'])
 
     srcdir = d['sep'] and path.join(d['path'], 'source') or d['path']
 
-    mkdir_p(srcdir)
+    ensuredir(srcdir)
     if d['sep']:
         builddir = path.join(d['path'], 'build')
         d['exclude_patterns'] = ''

@@ -448,17 +441,19 @@ def generate(d, overwrite=True, silent=False, templatedir=None):
             'Thumbs.db', '.DS_Store',
         ])
         d['exclude_patterns'] = ', '.join(exclude_patterns)
-    mkdir_p(builddir)
-    mkdir_p(path.join(srcdir, d['dot'] + 'templates'))
-    mkdir_p(path.join(srcdir, d['dot'] + 'static'))
+    ensuredir(builddir)
+    ensuredir(path.join(srcdir, d['dot'] + 'templates'))
+    ensuredir(path.join(srcdir, d['dot'] + 'static'))
 
     def write_file(fpath, content, newline=None):
         # type: (unicode, unicode, unicode) -> None
         if overwrite or not path.isfile(fpath):
-            print('Creating file %s.' % fpath)
+            if 'quiet' not in d:
+                print('Creating file %s.' % fpath)
             with open(fpath, 'wt', encoding='utf-8', newline=newline) as f:
                 f.write(content)
         else:
-            print('File %s already exists, skipping.' % fpath)
+            if 'quiet' not in d:
+                print('File %s already exists, skipping.' % fpath)
 
     conf_path = os.path.join(templatedir, 'conf.py_t') if templatedir else None

@@ -136,8 +136,8 @@ class BuildDoc(Command):
         # type: () -> None
         if self.source_dir is None:
             self.source_dir = self._guess_source_dir()
-            self.announce('Using source directory %s' % self.source_dir)  # type: ignore
-        self.ensure_dirname('source_dir')  # type: ignore
+            self.announce('Using source directory %s' % self.source_dir)
+        self.ensure_dirname('source_dir')
         if self.source_dir is None:
             self.source_dir = os.curdir
         self.source_dir = abspath(self.source_dir)

@@ -145,10 +145,10 @@ class BuildDoc(Command):
             self.config_dir = self.source_dir
         self.config_dir = abspath(self.config_dir)
 
-        self.ensure_string_list('builder')  # type: ignore
+        self.ensure_string_list('builder')
         if self.build_dir is None:
-            build = self.get_finalized_command('build')  # type: ignore
-            self.build_dir = os.path.join(abspath(build.build_base), 'sphinx')
+            build = self.get_finalized_command('build')
+            self.build_dir = os.path.join(abspath(build.build_base), 'sphinx')  # type: ignore
             self.mkpath(self.build_dir)  # type: ignore
         self.build_dir = abspath(self.build_dir)
         self.doctree_dir = os.path.join(self.build_dir, 'doctrees')

@@ -6,7 +6,7 @@
 %
 
 \NeedsTeXFormat{LaTeX2e}[1995/12/01]
-\ProvidesPackage{sphinx}[2017/07/24 v1.6.4 LaTeX package (Sphinx markup)]
+\ProvidesPackage{sphinx}[2017/12/12 v1.6.6 LaTeX package (Sphinx markup)]
 
 % provides \ltx@ifundefined
 % (many packages load ltxcmds: graphicx does for pdftex and lualatex but

@@ -39,7 +39,7 @@
 \@ifclassloaded{memoir}{}{\RequirePackage{fancyhdr}}
 % for \text macro and \iffirstchoice@ conditional even if amsmath not loaded
 \RequirePackage{amstext}
-\RequirePackage{textcomp}
+\RequirePackage[warn]{textcomp}
 \RequirePackage{titlesec}
 \@ifpackagelater{titlesec}{2016/03/15}%
 {\@ifpackagelater{titlesec}{2016/03/21}%

@@ -159,6 +159,7 @@
 % For highlighted code.
 \RequirePackage{fancyvrb}
 \fvset{fontsize=\small}
+\define@key{FV}{hllines}{\def\sphinx@verbatim@checkifhl##1{\in@{, ##1,}{#1}}}
 % For hyperlinked footnotes in tables; also for gathering footnotes from
 % topic and warning blocks. Also to allow code-blocks in footnotes.
 \RequirePackage{footnotehyper-sphinx}

@@ -208,6 +209,17 @@
 
 % stylesheet for highlighting with pygments
 \RequirePackage{sphinxhighlight}
+% fix baseline increase from Pygments latex formatter in case of error tokens
+% and keep \fboxsep's scope local via added braces
+\def\PYG@tok@err{%
+  \def\PYG@bc##1{{\setlength{\fboxsep}{-\fboxrule}%
+  \fcolorbox[rgb]{1.00,0.00,0.00}{1,1,1}{\strut ##1}}}%
+}
+\def\PYG@tok@cs{%
+  \def\PYG@tc##1{\textcolor[rgb]{0.25,0.50,0.56}{##1}}%
+  \def\PYG@bc##1{{\setlength{\fboxsep}{0pt}%
+  \colorbox[rgb]{1.00,0.94,0.94}{\strut ##1}}}%
+}%
 
 
 %% OPTIONS

@@ -306,6 +318,8 @@
 % set the key handler. The "value" ##1 must be acceptable by \definecolor.
 \define@key{sphinx}{#1}{\definecolor{sphinx#1}##1}%
 }%
+% Default color chosen to be as in minted.sty LaTeX package!
+\sphinxDeclareSphinxColorOption{VerbatimHighlightColor}{{rgb}{0.878,1,1}}
 % admonition boxes, "light" style
 \sphinxDeclareSphinxColorOption{noteBorderColor}{{rgb}{0,0,0}}
 \sphinxDeclareSphinxColorOption{hintBorderColor}{{rgb}{0,0,0}}

@@ -837,6 +851,34 @@
 
 % needed to create wrapper environments of fancyvrb's Verbatim
 \newcommand*{\sphinxVerbatimEnvironment}{\gdef\FV@EnvironName{sphinxVerbatim}}
+% serves to implement line highlighting and line wrapping
+\newcommand\sphinxFancyVerbFormatLine[1]{%
+  \expandafter\sphinx@verbatim@checkifhl\expandafter{\the\FV@CodeLineNo}%
+  \ifin@
+    \sphinxVerbatimHighlightLine{#1}%
+  \else
+    \sphinxVerbatimFormatLine{#1}%
+  \fi
+}%
+\newcommand\sphinxVerbatimHighlightLine[1]{%
+  \edef\sphinxrestorefboxsep{\fboxsep\the\fboxsep\relax}%
+  \fboxsep0pt\relax % cf LaTeX bug graphics/4524
+  \colorbox{sphinxVerbatimHighlightColor}%
+    {\sphinxrestorefboxsep\sphinxVerbatimFormatLine{#1}}%
+  % no need to restore \fboxsep here, as this ends up in a \hbox from fancyvrb
+}%
+% \sphinxVerbatimFormatLine will be set locally to one of those two:
+\newcommand\sphinxVerbatimFormatLineWrap[1]{%
+  \hsize\linewidth
+  \vtop{\raggedright\hyphenpenalty\z@\exhyphenpenalty\z@
+        \doublehyphendemerits\z@\finalhyphendemerits\z@
+        \strut #1\strut}%
+}%
+\newcommand\sphinxVerbatimFormatLineNoWrap[1]{\hb@xt@\linewidth{\strut #1\hss}}%
+\g@addto@macro\FV@SetupFont{%
+  \sbox\sphinxcontinuationbox {\spx@opt@verbatimcontinued}%
+  \sbox\sphinxvisiblespacebox {\spx@opt@verbatimvisiblespace}%
+}%
 % Sphinx <1.5 optional argument was in fact mandatory. It is now really
 % optional and handled by original Verbatim.
 \newenvironment{sphinxVerbatim}{%

@@ -883,23 +925,19 @@
 % to achieve this without extensive rewrite of fancyvrb.
 % - The (not used in sphinx) obeytabs option to Verbatim is
 %   broken by this change (showtabs and tabspace work).
-\expandafter\def\expandafter\FV@SetupFont\expandafter
-  {\FV@SetupFont\sbox\sphinxcontinuationbox {\spx@opt@verbatimcontinued}%
-   \sbox\sphinxvisiblespacebox {\spx@opt@verbatimvisiblespace}}%
-\def\FancyVerbFormatLine ##1{\hsize\linewidth
-  \vtop{\raggedright\hyphenpenalty\z@\exhyphenpenalty\z@
-        \doublehyphendemerits\z@\finalhyphendemerits\z@
-        \strut ##1\strut}%
-}%
+\let\sphinxVerbatimFormatLine\sphinxVerbatimFormatLineWrap
 \let\FV@Space\spx@verbatim@space
 % Allow breaks at special characters using \PYG... macros.
 \sphinxbreaksatspecials
 % Breaks at punctuation characters . , ; ? ! and / (needs catcode activation)
-\def\FancyVerbCodes{\sphinxbreaksviaactive}%
-\fi % end of conditional code for wrapping long code lines
-% go around fancyvrb's check of \@currenvir
+\fvset{codes*=\sphinxbreaksviaactive}%
+\else % end of conditional code for wrapping long code lines
+\let\sphinxVerbatimFormatLine\sphinxVerbatimFormatLineNoWrap
+\fi
+\let\FancyVerbFormatLine\sphinxFancyVerbFormatLine
+% workaround to fancyvrb's check of \@currenvir
 \let\VerbatimEnvironment\sphinxVerbatimEnvironment
-% go around fancyvrb's check of current list depth
+% workaround to fancyvrb's check of current list depth
 \def\@toodeep {\advance\@listdepth\@ne}%
 % The list environment is needed to control perfectly the vertical space.
 % Note: \OuterFrameSep used by framed.sty is later set to \topsep hence 0pt.

@@ -1212,7 +1250,7 @@
 \spx@notice@border \dimexpr\csname spx@opt@#1border\endcsname\relax
 % start specific environment, passing the heading as argument
 \begin{sphinx#1}{#2}}
-% in end part, need to go around a LaTeX's "feature"
+% workaround some LaTeX "feature" of \end command
 {\edef\spx@temp{\noexpand\end{sphinx\spx@noticetype}}\spx@temp}
 % use of ``notice'' is for backwards compatibility and will be removed in
 % Sphinx 1.7.

@@ -346,5 +346,5 @@ class SphinxSmartQuotes(SmartQuotes):
         texttype = {True: 'literal',  # "literal" text is not changed:
                     False: 'plain'}
         for txtnode in txtnodes:
-            smartquotable = not is_smartquotable(txtnode)
-            yield (texttype[smartquotable], txtnode.astext())
+            notsmartquotable = not is_smartquotable(txtnode)
+            yield (texttype[notsmartquotable], txtnode.astext())

@@ -14,6 +14,10 @@ from docutils import nodes
 from sphinx import addnodes
 from sphinx.transforms import SphinxTransform
 
+if False:
+    # For type annotation
+    from typing import List  # NOQA
+
 
 class RefOnlyListChecker(nodes.GenericNodeVisitor):
     """Raise `nodes.NodeFound` if non-simple list item is encountered.

@@ -32,7 +36,7 @@ class RefOnlyListChecker(nodes.GenericNodeVisitor):
 
     def visit_list_item(self, node):
         # type: (nodes.Node) -> None
-        children = []
+        children = []  # type: List[nodes.Node]
         for child in node.children:
             if not isinstance(child, nodes.Invisible):
                 children.append(child)

@@ -398,10 +398,8 @@ def parselinenos(spec, total):
         elif len(begend) == 1:
             items.append(int(begend[0]) - 1)
         elif len(begend) == 2:
-            start = int(begend[0] or 1)  # type: ignore
-            # left half open (cf. -10)
-            end = int(begend[1] or max(start, total))  # type: ignore
-            # right half open (cf. 10-)
+            start = int(begend[0] or 1)  # left half open (cf. -10)
+            end = int(begend[1] or max(start, total))  # right half open (cf. 10-)
             if start > end:  # invalid range (cf. 10-1)
                 raise ValueError
             items.extend(range(start - 1, end))

|
|||||||
def peek(self):
|
def peek(self):
|
||||||
# type: () -> Any
|
# type: () -> Any
|
||||||
"""Return the next item without changing the state of the iterator."""
|
"""Return the next item without changing the state of the iterator."""
|
||||||
item = next(self) # type: ignore
|
item = next(self)
|
||||||
self.push(item)
|
self.push(item)
|
||||||
return item
|
return item
|
||||||
|
|
||||||
|
@ -82,6 +82,10 @@ def convert_serializable(records):
|
|||||||
r.msg = r.getMessage()
|
r.msg = r.getMessage()
|
||||||
r.args = ()
|
r.args = ()
|
||||||
|
|
||||||
|
location = getattr(r, 'location', None)
|
||||||
|
if isinstance(location, nodes.Node):
|
||||||
|
r.location = get_node_location(location) # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class SphinxWarningLogRecord(logging.LogRecord):
|
class SphinxWarningLogRecord(logging.LogRecord):
|
||||||
"""Log record class supporting location"""
|
"""Log record class supporting location"""
|
||||||
@ -152,8 +156,8 @@ class NewLineStreamHandlerPY2(logging.StreamHandler):
|
|||||||
# remove return code forcely when nonl=True
|
# remove return code forcely when nonl=True
|
||||||
self.stream = StringIO()
|
self.stream = StringIO()
|
||||||
super(NewLineStreamHandlerPY2, self).emit(record)
|
super(NewLineStreamHandlerPY2, self).emit(record)
|
||||||
stream.write(self.stream.getvalue()[:-1]) # type: ignore
|
stream.write(self.stream.getvalue()[:-1])
|
||||||
stream.flush() # type: ignore
|
stream.flush()
|
||||||
else:
|
else:
|
||||||
super(NewLineStreamHandlerPY2, self).emit(record)
|
super(NewLineStreamHandlerPY2, self).emit(record)
|
||||||
finally:
|
finally:
|
||||||
@ -415,21 +419,26 @@ class WarningLogRecordTranslator(logging.Filter):
|
|||||||
else:
|
else:
|
||||||
record.location = None
|
record.location = None
|
||||||
elif isinstance(location, nodes.Node):
|
elif isinstance(location, nodes.Node):
|
||||||
(source, line) = get_source_line(location)
|
record.location = get_node_location(location)
|
||||||
if source and line:
|
|
||||||
record.location = "%s:%s" % (source, line)
|
|
||||||
elif source:
|
|
||||||
record.location = "%s:" % source
|
|
||||||
elif line:
|
|
||||||
record.location = "<unknown>:%s" % line
|
|
||||||
else:
|
|
||||||
record.location = None
|
|
||||||
elif location and ':' not in location:
|
elif location and ':' not in location:
|
||||||
record.location = '%s' % self.app.env.doc2path(location)
|
record.location = '%s' % self.app.env.doc2path(location)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def get_node_location(node):
|
||||||
|
# type: (nodes.Node) -> str
|
||||||
|
(source, line) = get_source_line(node)
|
||||||
|
if source and line:
|
||||||
|
return "%s:%s" % (source, line)
|
||||||
|
elif source:
|
||||||
|
return "%s:" % source
|
||||||
|
elif line:
|
||||||
|
return "<unknown>:%s" % line
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
class ColorizeFormatter(logging.Formatter):
|
class ColorizeFormatter(logging.Formatter):
|
||||||
def format(self, record):
|
def format(self, record):
|
||||||
# type: (logging.LogRecord) -> str
|
# type: (logging.LogRecord) -> str
|
||||||
|
@@ -48,6 +48,8 @@ BEGIN_DOC = r'''
 
 URI_SCHEMES = ('mailto:', 'http:', 'https:', 'ftp:')
 SECNUMDEPTH = 3
+LATEXSECTIONNAMES = ["part", "chapter", "section", "subsection",
+                     "subsubsection", "paragraph", "subparagraph"]
 
 DEFAULT_SETTINGS = {
     'latex_engine': 'pdflatex',

@@ -501,8 +503,6 @@ def rstdim_to_latexdim(width_str):
 
 
 class LaTeXTranslator(nodes.NodeVisitor):
-    sectionnames = ["part", "chapter", "section", "subsection",
-                    "subsubsection", "paragraph", "subparagraph"]
 
     ignore_missing_images = False
 

@@ -532,16 +532,6 @@ class LaTeXTranslator(nodes.NodeVisitor):
         self.compact_list = 0
         self.first_param = 0
 
-        # determine top section level
-        if builder.config.latex_toplevel_sectioning:
-            self.top_sectionlevel = \
-                self.sectionnames.index(builder.config.latex_toplevel_sectioning)
-        else:
-            if document.settings.docclass == 'howto':
-                self.top_sectionlevel = 2
-            else:
-                self.top_sectionlevel = 1
-
         # sort out some elements
         self.elements = DEFAULT_SETTINGS.copy()
         self.elements.update(ADDITIONAL_SETTINGS.get(builder.config.latex_engine, {}))

@@ -564,11 +554,30 @@ class LaTeXTranslator(nodes.NodeVisitor):
             })
         if builder.config.latex_keep_old_macro_names:
             self.elements['sphinxpkgoptions'] = ''
+
+        # we assume LaTeX class provides \chapter command except in case
+        # of non-Japanese 'howto' case
+        self.sectionnames = LATEXSECTIONNAMES[:]
         if document.settings.docclass == 'howto':
             docclass = builder.config.latex_docclass.get('howto', 'article')
+            if docclass[0] == 'j':  # Japanese class...
+                pass
+            else:
+                self.sectionnames.remove('chapter')
         else:
             docclass = builder.config.latex_docclass.get('manual', 'report')
         self.elements['docclass'] = docclass
+
+        # determine top section level
+        self.top_sectionlevel = 1
+        if builder.config.latex_toplevel_sectioning:
+            try:
+                self.top_sectionlevel = \
+                    self.sectionnames.index(builder.config.latex_toplevel_sectioning)
+            except ValueError:
+                logger.warning('unknown %r toplevel_sectioning for class %r' %
+                               (builder.config.latex_toplevel_sectioning, docclass))
+
         if builder.config.today:
             self.elements['date'] = builder.config.today
         else:

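The rewritten constructor derives the usable section levels from the effective document class, so a non-Japanese 'howto' (article-based) has no ``\chapter``. An illustrative pairing of the settings involved (example values, not defaults introduced by this commit)::

   # conf.py -- illustrative
   latex_docclass = {'howto': 'article', 'manual': 'report'}
   latex_toplevel_sectioning = 'section'   # 'chapter' would trigger the new warning
                                           # for an article-based howto
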
@@ -631,21 +640,23 @@ class LaTeXTranslator(nodes.NodeVisitor):
             usepackages = (declare_package(*p) for p in builder.usepackages)
             self.elements['usepackages'] += "\n".join(usepackages)
         if document.get('tocdepth'):
-            # redece tocdepth if `part` or `chapter` is used for top_sectionlevel
+            # reduce tocdepth if `part` or `chapter` is used for top_sectionlevel
             # tocdepth = -1: show only parts
             # tocdepth = 0: show parts and chapters
             # tocdepth = 1: show parts, chapters and sections
             # tocdepth = 2: show parts, chapters, sections and subsections
             # ...
+
             tocdepth = document['tocdepth'] + self.top_sectionlevel - 2
-            maxdepth = len(self.sectionnames) - self.top_sectionlevel
-            if tocdepth > maxdepth:
+            if len(self.sectionnames) < 7 and self.top_sectionlevel > 0:
+                tocdepth += 1  # because top_sectionlevel is shifted by -1
+            if tocdepth > 5:  # 5 corresponds to subparagraph
                 logger.warning('too large :maxdepth:, ignored.')
-                tocdepth = maxdepth
+                tocdepth = 5
 
             self.elements['tocdepth'] = '\\setcounter{tocdepth}{%d}' % tocdepth
             if tocdepth >= SECNUMDEPTH:
-                # Increase secnumdepth if tocdepth is depther than default SECNUMDEPTH
+                # Increase secnumdepth if tocdepth is deeper than default SECNUMDEPTH
                 self.elements['secnumdepth'] = '\\setcounter{secnumdepth}{%d}' % tocdepth
 
         if getattr(document.settings, 'contentsname', None):

@@ -2270,6 +2281,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
         lang = self.hlsettingstack[-1][0]
         linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1
         highlight_args = node.get('highlight_args', {})
+        hllines = '\\fvset{hllines={, %s,}}%%' %\
+                  str(highlight_args.get('hl_lines', []))[1:-1]
         if 'language' in node:
             # code-block directives
             lang = node['language']

@@ -2308,7 +2321,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
             hlcode += '\\end{sphinxVerbatimintable}'
         else:
             hlcode += '\\end{sphinxVerbatim}'
-        self.body.append('\n' + hlcode + '\n')
+        self.body.append('\n' + hllines + '\n' + hlcode + '\n')
         if ids:
             self.body.append('\\let\\sphinxLiteralBlockLabel\\empty\n')
         raise nodes.SkipNode

tests/roots/test-builder-gettext-dont-rebuild-mo/bom.rst (new file, 5 lines)

@@ -0,0 +1,5 @@
+File with UTF-8 BOM
+===================
+
+This file has a UTF-8 "BOM".
+

tests/roots/test-builder-gettext-dont-rebuild-mo/conf.py (new file, 7 lines)

@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+
+master_doc = 'index'
+
+latex_documents = [
+    (master_doc, 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report')
+]

@@ -0,0 +1,6 @@
+The basic Sphinx documentation for testing
+==========================================
+
+.. toctree::
+
+   bom

tests/roots/test-directive-code/emphasize.rst (new file, 7 lines)

@@ -0,0 +1,7 @@
+Literal Includes with Highlighted Lines
+=======================================
+
+.. literalinclude:: target.py
+   :language: python
+   :emphasize-lines: 5-6, 13-15, 24-
+

@@ -27,6 +27,7 @@ header2
 
 \endlastfoot
 
+\fvset{hllines={, ,}}%
 \begin{sphinxVerbatimintable}[commandchars=\\\{\}]
 \PYG{n}{hello} \PYG{n}{world}
 \end{sphinxVerbatimintable}

@@ -10,6 +10,7 @@ header1
 header2
 \unskip}\relax \\
 \hline
+\fvset{hllines={, ,}}%
 \begin{sphinxVerbatimintable}[commandchars=\\\{\}]
 \PYG{n}{hello} \PYG{n}{world}
 \end{sphinxVerbatimintable}

@@ -1 +0,0 @@
-This whole directory is there to test html_static_path.

@@ -1 +0,0 @@
-/* This file should be excluded from being copied over */

@@ -1 +0,0 @@
-/* Stub file */

@@ -29,15 +29,10 @@ numfig = True
 
 rst_epilog = '.. |subst| replace:: global substitution'
 
-html_theme = 'testtheme'
-html_theme_path = ['.']
-html_theme_options = {'testopt': 'testoverride'}
 html_sidebars = {'**': 'customsb.html',
                  'contents': ['contentssb.html', 'localtoc.html',
                               'globaltoc.html']}
 html_style = 'default.css'
-html_static_path = ['_static', 'templated.css_t']
-html_extra_path = ['robots.txt']
 html_last_updated_fmt = '%b %d, %Y'
 html_context = {'hckey': 'hcval', 'hckey_co': 'wrong_hcval_co'}
 

@@ -1,2 +0,0 @@
-User-agent: *
-Disallow: /cgi-bin/
@@ -1,9 +0,0 @@
-#, fuzzy
-msgid ""
-msgstr ""
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-
-msgid "Including in subdir"
-msgstr "translation"
@@ -1,2 +0,0 @@
-/* Stub file, templated */
-{{ sphinx_version }}
(binary image file: 120 B before and after)
@@ -59,13 +59,13 @@ def nonascii_srcdir(request, rootdir, sphinx_test_tempdir):
     return srcdir
 
 
+# note: this test skips building docs for some builders because they have independent testcase.
+#       (html, latex, texinfo and manpage)
 @pytest.mark.parametrize(
     "buildername",
     [
-        'dirhtml', 'singlehtml', 'latex', 'texinfo', 'pickle', 'json', 'text',
-        'htmlhelp', 'qthelp', 'epub2', 'epub', 'applehelp', 'changes', 'xml',
-        'pseudoxml', 'man', 'linkcheck',
+        # note: no 'html' - if it's ok with dirhtml it's ok with html
+        'dirhtml', 'singlehtml', 'pickle', 'json', 'text', 'htmlhelp', 'qthelp',
+        'epub2', 'epub', 'applehelp', 'changes', 'xml', 'pseudoxml', 'linkcheck',
     ],
 )
 @mock.patch('sphinx.builders.linkcheck.requests.head',
@@ -126,24 +126,6 @@ def check_xpath(etree, fname, path, check, be_found=True):
                                               [node.text for node in nodes]))
 
 
-def check_static_entries(outdir):
-    staticdir = outdir / '_static'
-    assert staticdir.isdir()
-    # a file from a directory entry in html_static_path
-    assert (staticdir / 'README').isfile()
-    # a directory from a directory entry in html_static_path
-    assert (staticdir / 'subdir' / 'foo.css').isfile()
-    # a file from a file entry in html_static_path
-    assert (staticdir / 'templated.css').isfile()
-    assert (staticdir / 'templated.css').text().splitlines()[1] == __display_version__
-    # a file from _static, but matches exclude_patterns
-    assert not (staticdir / 'excluded.css').exists()
-
-
-def check_extra_entries(outdir):
-    assert (outdir / 'robots.txt').isfile()
-
-
 @pytest.mark.sphinx('html', testroot='warnings')
 def test_html_warnings(app, warning):
     app.build()
@@ -156,15 +138,6 @@ def test_html_warnings(app, warning):
         '--- Got:\n' + html_warnings
 
 
-@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
-    'html_context.hckey_co': 'hcval_co'})
-@pytest.mark.test_params(shared_result='test_build_html_output')
-def test_static_output(app):
-    app.build()
-    check_static_entries(app.builder.outdir)
-    check_extra_entries(app.builder.outdir)
-
-
 @pytest.mark.parametrize("fname,expect", flat_dict({
     'images.html': [
         (".//img[@src='_images/img.png']", ''),
@@ -377,7 +350,6 @@ def test_static_output(app):
     'contents.html': [
         (".//meta[@name='hc'][@content='hcval']", ''),
         (".//meta[@name='hc_co'][@content='hcval_co']", ''),
-        (".//meta[@name='testopt'][@content='testoverride']", ''),
         (".//td[@class='label']", r'\[Ref1\]'),
         (".//td[@class='label']", ''),
         (".//li[@class='toctree-l1']/a", 'Testing various markup'),
@@ -410,9 +382,6 @@ def test_static_output(app):
         (".//a[@href='http://bugs.python.org/issue1000']", "issue 1000"),
         (".//a[@href='http://bugs.python.org/issue1042']", "explicit caption"),
     ],
-    '_static/statictmpl.html': [
-        (".//project", 'Sphinx <Tests>'),
-    ],
     'genindex.html': [
         # index entries
         (".//a/strong", "Main"),
@@ -1145,16 +1114,28 @@ def test_html_assets(app):
     assert not (app.outdir / 'subdir' / '.htpasswd').exists()
 
 
-@pytest.mark.sphinx('html', confoverrides={'html_sourcelink_suffix': ''})
+@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_copy_source': False})
+def test_html_copy_source(app):
+    app.builder.build_all()
+    assert not (app.outdir / '_sources' / 'index.rst.txt').exists()
+
+
+@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_sourcelink_suffix': '.txt'})
 def test_html_sourcelink_suffix(app):
     app.builder.build_all()
-    content_otherext = (app.outdir / 'otherext.html').text()
-    content_images = (app.outdir / 'images.html').text()
-
-    assert '<a href="_sources/otherext.foo"' in content_otherext
-    assert '<a href="_sources/images.txt"' in content_images
-    assert (app.outdir / '_sources' / 'otherext.foo').exists()
-    assert (app.outdir / '_sources' / 'images.txt').exists()
+    assert (app.outdir / '_sources' / 'index.rst.txt').exists()
+
+
+@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_sourcelink_suffix': '.rst'})
+def test_html_sourcelink_suffix_same(app):
+    app.builder.build_all()
+    assert (app.outdir / '_sources' / 'index.rst').exists()
+
+
+@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_sourcelink_suffix': ''})
+def test_html_sourcelink_suffix_empty(app):
+    app.builder.build_all()
+    assert (app.outdir / '_sources' / 'index.rst').exists()
 
 
 @pytest.mark.sphinx('html', testroot='html_entity')
@@ -252,7 +252,6 @@ def cached_etree_parse():
     'contents.html': [
         (".//meta[@name='hc'][@content='hcval']", ''),
         (".//meta[@name='hc_co'][@content='hcval_co']", ''),
-        (".//meta[@name='testopt'][@content='testoverride']", ''),
         (".//dt[@class='label']/span[@class='brackets']", r'Ref1'),
         (".//dt[@class='label']", ''),
         (".//li[@class='toctree-l1']/a", 'Testing various markup'),
@@ -285,9 +284,6 @@ def cached_etree_parse():
         (".//a[@href='http://bugs.python.org/issue1000']", "issue 1000"),
         (".//a[@href='http://bugs.python.org/issue1042']", "explicit caption"),
     ],
-    '_static/statictmpl.html': [
-        (".//project", 'Sphinx <Tests>'),
-    ],
     'genindex.html': [
         # index entries
         (".//a/strong", "Main"),
@@ -713,20 +713,16 @@ def test_latex_logo_if_not_found(app, status, warning):
     assert isinstance(exc, SphinxError)
 
 
-@pytest.mark.sphinx('latex', testroot='toctree-maxdepth',
-                    confoverrides={'latex_documents': [
-                        ('index', 'SphinxTests.tex', 'Sphinx Tests Documentation',
-                         'Georg Brandl', 'manual'),
-                    ]})
+@pytest.mark.sphinx('latex', testroot='toctree-maxdepth')
 def test_toctree_maxdepth_manual(app, status, warning):
     app.builder.build_all()
-    result = (app.outdir / 'SphinxTests.tex').text(encoding='utf8')
+    result = (app.outdir / 'Python.tex').text(encoding='utf8')
     print(result)
     print(status.getvalue())
     print(warning.getvalue())
     assert '\\setcounter{tocdepth}{1}' in result
     assert '\\setcounter{secnumdepth}' not in result
+    assert '\\chapter{Foo}' in result
 
 
 @pytest.mark.sphinx(
     'latex', testroot='toctree-maxdepth',
@@ -742,7 +738,7 @@ def test_toctree_maxdepth_howto(app, status, warning):
     print(warning.getvalue())
     assert '\\setcounter{tocdepth}{2}' in result
     assert '\\setcounter{secnumdepth}' not in result
+    assert '\\section{Foo}' in result
 
 
 @pytest.mark.sphinx(
     'latex', testroot='toctree-maxdepth',
@@ -755,7 +751,7 @@ def test_toctree_not_found(app, status, warning):
     print(warning.getvalue())
     assert '\\setcounter{tocdepth}' not in result
     assert '\\setcounter{secnumdepth}' not in result
+    assert '\\chapter{Foo A}' in result
 
 
 @pytest.mark.sphinx(
     'latex', testroot='toctree-maxdepth',
@@ -805,6 +801,26 @@ def test_latex_toplevel_sectioning_is_part(app, status, warning):
     print(status.getvalue())
     print(warning.getvalue())
     assert '\\part{Foo}' in result
+    assert '\\chapter{Foo A}' in result
+    assert '\\chapter{Foo B}' in result
+
+
+@pytest.mark.sphinx(
+    'latex', testroot='toctree-maxdepth',
+    confoverrides={'latex_toplevel_sectioning': 'part',
+                   'latex_documents': [
+                       ('index', 'Python.tex', 'Sphinx Tests Documentation',
+                        'Georg Brandl', 'howto')
+                   ]})
+def test_latex_toplevel_sectioning_is_part_with_howto(app, status, warning):
+    app.builder.build_all()
+    result = (app.outdir / 'Python.tex').text(encoding='utf8')
+    print(result)
+    print(status.getvalue())
+    print(warning.getvalue())
+    assert '\\part{Foo}' in result
+    assert '\\section{Foo A}' in result
+    assert '\\section{Foo B}' in result
 
 
 @pytest.mark.sphinx(
@@ -819,6 +835,22 @@ def test_latex_toplevel_sectioning_is_chapter(app, status, warning):
     assert '\\chapter{Foo}' in result
+
+
+@pytest.mark.sphinx(
+    'latex', testroot='toctree-maxdepth',
+    confoverrides={'latex_toplevel_sectioning': 'chapter',
+                   'latex_documents': [
+                       ('index', 'Python.tex', 'Sphinx Tests Documentation',
+                        'Georg Brandl', 'howto')
+                   ]})
+def test_latex_toplevel_sectioning_is_chapter_with_howto(app, status, warning):
+    app.builder.build_all()
+    result = (app.outdir / 'Python.tex').text(encoding='utf8')
+    print(result)
+    print(status.getvalue())
+    print(warning.getvalue())
+    assert '\\section{Foo}' in result
 
 
 @pytest.mark.sphinx(
     'latex', testroot='toctree-maxdepth',
     confoverrides={'latex_toplevel_sectioning': 'section'})
@@ -349,6 +349,14 @@ def test_code_block_namedlink_latex(app, status, warning):
     assert link2 in latex
 
 
+@pytest.mark.sphinx('latex', testroot='directive-code')
+def test_code_block_emphasize_latex(app, status, warning):
+    app.builder.build(['emphasize'])
+    latex = (app.outdir / 'Python.tex').text(encoding='utf-8').replace('\r\n', '\n')
+    includes = '\\fvset{hllines={, 5, 6, 13, 14, 15, 24, 25, 26, 27,}}%\n'
+    assert includes in latex
+
+
 @pytest.mark.sphinx('xml', testroot='directive-code')
 def test_literal_include(app, status, warning):
     app.builder.build(['index'])
@@ -520,7 +520,7 @@ def test_gettext_buildr_ignores_only_directive(app):
 
 @sphinx_intl
 # use individual shared_result directory to avoid "incompatible doctree" error
-@pytest.mark.test_params(shared_result='test_gettext_dont_rebuild_mo')
+@pytest.mark.sphinx(testroot='builder-gettext-dont-rebuild-mo')
 def test_gettext_dont_rebuild_mo(make_app, app_params, build_mo):
     # --- don't rebuild by .mo mtime
     def get_number_of_update_targets(app_):
@@ -533,7 +533,7 @@ def test_gettext_dont_rebuild_mo(make_app, app_params, build_mo):
     app0 = make_app('dummy', *args, **kwargs)
     build_mo(app0.srcdir)
     app0.build()
-    assert (app0.srcdir / 'bom.mo')
+    assert (app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').exists()
     # Since it is after the build, the number of documents to be updated is 0
     assert get_number_of_update_targets(app0) == 0
     # When rewriting the timestamp of mo file, the number of documents to be
@@ -211,7 +211,8 @@ def get_verifier(verify, verify_re):
         'verify',
         u'::\n\n @Γ\\∞${}',
         None,
-        (u'\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n'
+        (u'\\fvset{hllines={, ,}}%\n'
+         u'\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n'
          u'@\\(\\Gamma\\)\\PYGZbs{}\\(\\infty\\)\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n'
          u'\\end{sphinxVerbatim}'),
     ),
@@ -17,6 +17,7 @@ from sphinx.theming import ThemeError
 
 
 @pytest.mark.sphinx(
+    testroot='theming',
     confoverrides={'html_theme': 'ziptheme',
                    'html_theme_options.testopt': 'foo'})
 def test_theme_api(app, status, warning):
@@ -25,10 +26,11 @@ def test_theme_api(app, status, warning):
     # test Theme class API
     assert set(app.html_themes.keys()) == \
         set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku',
-             'traditional', 'testtheme', 'ziptheme', 'epub', 'nature',
-             'pyramid', 'bizstyle', 'classic', 'nonav'])
-    assert app.html_themes['testtheme'] == app.srcdir / 'testtheme'
+             'traditional', 'epub', 'nature', 'pyramid', 'bizstyle', 'classic', 'nonav',
+             'test-theme', 'ziptheme', 'staticfiles', 'parent', 'child'])
+    assert app.html_themes['test-theme'] == app.srcdir / 'test_theme' / 'test-theme'
     assert app.html_themes['ziptheme'] == app.srcdir / 'ziptheme.zip'
+    assert app.html_themes['staticfiles'] == app.srcdir / 'test_theme' / 'staticfiles'
 
     # test Theme instance API
     theme = app.builder.theme
@@ -93,3 +95,18 @@ def test_double_inheriting_theme(app, status, warning):
 def test_nested_zipped_theme(app, status, warning):
     assert app.builder.theme.name == 'child'
     app.build()  # => not raises TemplateNotFound
+
+
+@pytest.mark.sphinx(testroot='theming',
+                    confoverrides={'html_theme': 'staticfiles'})
+def test_staticfiles(app, status, warning):
+    app.build()
+    assert (app.outdir / '_static' / 'staticimg.png').exists()
+    assert (app.outdir / '_static' / 'statictmpl.html').exists()
+    assert (app.outdir / '_static' / 'statictmpl.html').text() == (
+        '<!-- testing static templates -->\n'
+        '<html><project>Python</project></html>'
+    )
+
+    result = (app.outdir / 'index.html').text()
+    assert '<meta name="testopt" content="optdefault" />' in result