Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Commit d8a109976b: Merge from master

.gitignore (vendored, new file): 20 changes
@@ -0,0 +1,20 @@
+*.pyc
+*.egg
+*.so
+
+.dir-locals.el
+.ropeproject/
+TAGS
+.tags
+.tox
+.DS_Store
+sphinx/pycode/Grammar*pickle
+distribute-*
+
+env/
+build/
+dist/
+Sphinx.egg-info/
+doc/_build/
+tests/.coverage
+tests/build/

.hgignore (deleted file): 21 changes
@@ -1,21 +0,0 @@
-.*\.pyc
-.*\.egg
-.*\.so
-.dir-locals.el
-^\.tox
-\.DS_Store$
-^build/
-^dist/
-^tests/.coverage
-^sphinx/pycode/Grammar.*pickle
-^Sphinx.egg-info/
-^doc/_build/
-^TAGS
-^\.ropeproject/
-^env/
-\.DS_Store$
-~$
-^utils/.*3\.py$
-^distribute-
-^tests/root/_build/*
-^tests/root/generated/*

.travis.yml: 11 changes
@@ -1,7 +1,14 @@
 language: python
 python:
+  - "2.6"
   - "2.7"
   - "3.3"
-script: make test
+  - "3.4"
+  - "pypy"
+env:
+  - DOCUTILS=0.11
+  - DOCUTILS=0.12
 install:
-  - python setup.py -q install
+  - pip install docutils==$DOCUTILS
+  - pip install -r test-reqs.txt
+script: make test

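For reference, the full CI configuration as it reads after this merge, reconstructed from the hunk above (the two-space YAML indentation is assumed, since the scraped diff does not preserve leading whitespace):

language: python
python:
  - "2.6"
  - "2.7"
  - "3.3"
  - "3.4"
  - "pypy"
env:
  - DOCUTILS=0.11
  - DOCUTILS=0.12
install:
  - pip install docutils==$DOCUTILS
  - pip install -r test-reqs.txt
script: make test

Travis expands "python" and "env" into a build matrix, so each listed interpreter is tested against both docutils releases; the install step pins the matching docutils version via $DOCUTILS before "make test" runs.
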
AUTHORS: 4 changes
@@ -8,7 +8,7 @@ Other co-maintainers:
 * Takayuki Shimizukawa <shimizukawa@gmail.com>
 * Daniel Neuhäuser <@DasIch>
 * Jon Waltman <@jonwaltman>
-* Rob Ruana <@RelentlessIdiot>
+* Rob Ruana <@RobRuana>
 * Robert Lehmann <@lehmannro>
 * Roland Meister <@rolmei>
 
@@ -26,6 +26,7 @@ Other contributors, listed alphabetically, are:
 * Horst Gutmann -- internationalization support
 * Martin Hans -- autodoc improvements
 * Doug Hellmann -- graphviz improvements
+* Takeshi Komiya -- numref feature
 * Dave Kuhlman -- original LaTeX writer
 * Blaise Laflamme -- pyramid theme
 * Thomas Lamb -- linkcheck builder
@@ -41,6 +42,7 @@ Other contributors, listed alphabetically, are:
 * Christopher Perkins -- autosummary integration
 * Benjamin Peterson -- unittests
 * T. Powers -- HTML output improvements
+* Jeppe Pihl -- literalinclude improvements
 * Rob Ruana -- napoleon extension
 * Stefan Seefeld -- toctree improvements
 * Shibukawa Yoshiki -- pluggable search API and Japanese search

CHANGES: 412 changes
@@ -1,5 +1,66 @@
-Release 1.3 (in development)
-============================
+Release 1.3b3 (in development)
+==============================
 
+Incompatible changes
+--------------------
+
+* Dependency requirement updates: docutils 0.11, Pygments 2.0
+
+Features added
+--------------
+
+* Added ``highlight_options`` configuration value.
+* The ``language`` config value is now available in the HTML templates.
+* The ``env-updated`` event can now return a value, which is interpreted
+  as an iterable of additional docnames that need to be rewritten.
+
+Bugs fixed
+----------
+
+* LaTeX writer now generates correct markup for cells spanning multiple rows.
+* #1674: Do not crash if a module's ``__all__`` is not a list of strings.
+* #1629: Use VerbatimBorderColor to add frame to code-block in LaTeX
+* On windows, make-mode didn't work on Win32 platform if sphinx was invoked as
+  ``python sphinx-build.py``.
+* #1687: linkcheck now treats 401 Unauthorized responses as "working".
+* #1690: toctrees with ``glob`` option now can also contain entries for single
+  documents with explicit title.
+
+
+Release 1.3b2 (released Dec 5, 2014)
+====================================
+
+Incompatible changes
+--------------------
+
+* update bundled ez_setup.py for setuptools-7.0 that requires Python 2.6 or
+  later.
+
+Features added
+--------------
+
+* #1597: Added possibility to return a new template name from
+  `html-page-context`.
+* PR#314, #1150: Configuration values are now checked for their type. A
+  warning is raised if the configured and the default value do not have the
+  same type and do not share a common non-trivial base class.
+
+Bugs fixed
+----------
+
+* PR#311: sphinx-quickstart does not work on python 3.4.
+* Fix :confval:`autodoc_docstring_signature` not working with signatures
+  in class docstrings.
+* Rebuilding cause crash unexpectedly when source files were added.
+* #1607: Fix a crash when building latexpdf with "howto" class
+* #1251: Fix again. Sections which depth are lower than :tocdepth: should not
+  be shown on localtoc sidebar.
+* make-mode didn't work on Win32 platform if sphinx was installed by wheel
+  package.
+
+
+Release 1.3b1 (released Oct 10, 2014)
+=====================================
+
 Incompatible changes
 --------------------
@@ -12,12 +73,21 @@ Incompatible changes
 * A new node, ``sphinx.addnodes.literal_strong``, has been added, for text that
   should appear literally (i.e. no smart quotes) in strong font. Custom writers
   will have to be adapted to handle this node.
-* PR#269, #1476: replace `<tt>` tag by `<code>`. User customized stylesheets
-  should be updated If the css contain some styles for `<tt>` tag.
+* PR#269, #1476: replace ``<tt>`` tag by ``<code>``. User customized stylesheets
+  should be updated If the css contain some styles for ``<tt>`` tag.
   Thanks to Takeshi Komiya.
-* #1543: :confval:`templates_path` is automatically added to
-  :confval:`exclude_patterns` to avoid reading autosummary rst templates in the
+* #1543: `templates_path` is automatically added to
+  `exclude_patterns` to avoid reading autosummary rst templates in the
   templates directory.
+* Custom domains should implement the new `Domain.resolve_any_xref`
+  method to make the `any` role work properly.
+* gettext builder: gettext doesn't emit uuid information to generated pot files
+  by default. Please set ``True`` to `gettext_uuid` to emit uuid information.
+  Additionally, if the ``python-levenshtein`` 3rd-party package is installed,
+  it will improve the calculation time.
+* gettext builder: disable extracting/apply 'index' node by default. Please set
+  'index' to :confval:`gettext_enables` to enable extracting index entries.
+* PR#307: Add frame to code-block in LaTeX. Thanks to Takeshi Komiya.
 
 Features added
 --------------
@@ -26,21 +96,35 @@ Features added
 * Add support for docutils 0.12
 * Added ``sphinx.ext.napoleon`` extension for NumPy and Google style docstring
   support.
+* Added support for parallel reading (parsing) of source files with the
+  `sphinx-build -j` option. Third-party extensions will need to be checked for
+  compatibility and may need to be adapted if they store information in the
+  build environment object. See `env-merge-info`.
+* Added the `any` role that can be used to find a cross-reference of
+  *any* type in *any* domain. Custom domains should implement the new
+  `Domain.resolve_any_xref` method to make this work properly.
+* Exception logs now contain the last 10 messages emitted by Sphinx.
 * Added support for extension versions (a string returned by ``setup()``, these
   can be shown in the traceback log files). Version requirements for extensions
-  can be specified in projects using the new :confval:`needs_extensions` config
+  can be specified in projects using the new `needs_extensions` config
   value.
+* Changing the default role within a document with the :dudir:`default-role`
+  directive is now supported.
 * PR#214: Added stemming support for 14 languages, so that the built-in document
   search can now handle these. Thanks to Shibukawa Yoshiki.
+* PR#296, PR#303, #76: numfig feature: Assign numbers to figures, tables and
+  code-blocks. This feature is configured with `numfig`, `numfig_secnum_depth`
+  and `numfig_format`. Also `numref` role is available. Thanks to Takeshi
+  Komiya.
 * PR#202: Allow "." and "~" prefixed references in ``:param:`` doc fields
   for Python.
-* PR#184: Add :confval:`autodoc_mock_imports`, allowing to mock imports of
+* PR#184: Add `autodoc_mock_imports`, allowing to mock imports of
   external modules that need not be present when autodocumenting.
 * #925: Allow list-typed config values to be provided on the command line,
   like ``-D key=val1,val2``.
-* #668: Allow line numbering of ``code-block`` and ``literalinclude`` directives
+* #668: Allow line numbering of `code-block` and `literalinclude` directives
   to start at an arbitrary line number, with a new ``lineno-start`` option.
-* PR#172, PR#266: The :rst:dir:`code-block` and :rst:dir:`literalinclude`
+* PR#172, PR#266: The `code-block` and `literalinclude`
   directives now can have a ``caption`` option that shows a filename before the
   code in the output. Thanks to Nasimul Haque, Takeshi Komiya.
 * Prompt for the document language in sphinx-quickstart.
@@ -55,132 +139,56 @@ Features added
   for the ids defined on the node. Thanks to Olivier Heurtier.
 * PR#229: Allow registration of other translators. Thanks to Russell Sim.
 * Add app.set_translator() API to register or override a Docutils translator
-  class like :confval:`html_translator_class`.
+  class like `html_translator_class`.
 * PR#267, #1134: add 'diff' parameter to literalinclude. Thanks to Richard Wall
   and WAKAYAMA shirou.
 * PR#272: Added 'bizstyle' theme. Thanks to Shoji KUMAGAI.
 * Automatically compile ``*.mo`` files from ``*.po`` files when
-  :confval:`gettext_auto_build` is True (default) and ``*.po`` is newer than
+  `gettext_auto_build` is True (default) and ``*.po`` is newer than
   ``*.mo`` file.
-* #623: :mod:`~sphinx.ext.viewcode` supports imported function/class aliases.
-* PR#275: :mod:`~sphinx.ext.intersphinx` supports multiple target for the
+* #623: `sphinx.ext.viewcode` supports imported function/class aliases.
+* PR#275: `sphinx.ext.intersphinx` supports multiple target for the
   inventory. Thanks to Brigitta Sipocz.
+* PR#261: Added the `env-before-read-docs` event that can be connected to modify
+  the order of documents before they are read by the environment.
+* #1284: Program options documented with :rst:dir:`option` can now start with
+  ``+``.
+* PR#291: The caption of :rst:dir:`code-block` is recognised as a title of ref
+  target. Thanks to Takeshi Komiya.
+* PR#298: Add new API: :meth:`~sphinx.application.Sphinx.add_latex_package`.
+  Thanks to Takeshi Komiya.
+* #1344: add :confval:`gettext_enables` to enable extracting 'index' to gettext
+  catalog output / applying translation catalog to generated documentation.
+* PR#301, #1583: Allow the line numbering of the directive `literalinclude` to
+  match that of the included file, using a new ``lineno-match`` option. Thanks
+  to Jeppe Pihl.
+* PR#299: add various options to sphinx-quickstart. Quiet mode option
+  ``--quiet`` will skips wizard mode. Thanks to WAKAYAMA shirou.
+* #1623: Return types specified with ``:rtype:`` are now turned into links if
+  possible.
 
 Bugs fixed
 ----------
 
+* #1438: Updated jQuery version from 1.8.3 to 1.11.1.
+* #1568: Fix a crash when a "centered" directive contains a reference.
+* Now sphinx.ext.autodoc works with python-2.5 again.
+* #1563: :meth:`~sphinx.application.Sphinx.add_search_language` raises
+  AssertionError for correct type of argument. Thanks to rikoman.
 * #1174: Fix smart quotes being applied inside roles like :rst:role:`program` or
-  :rst:role:`makevar`.
+  `makevar`.
-* #1335: Fix autosummary template overloading with exclamation prefix like
-  ``{% extends "!autosummary/class.rst" %}`` cause infinite recursive function
-  call. This was caused by PR#181.
-* #1337: Fix autodoc with ``autoclass_content="both"`` uses useless
-  ``object.__init__`` docstring when class does not have ``__init__``.
-  This was caused by a change for #1138.
-* #1340: Can't search alphabetical words on the HTML quick search generated
-  with language='ja'.
-* #1319: Do not crash if the :confval:`html_logo` file does not exist.
-* #603: Do not use the HTML-ized title for building the search index (that
-  resulted in "literal" being found on every page with a literal in the
-  title).
-* #751: Allow production lists longer than a page in LaTeX by using longtable.
-* #764: Always look for stopwords lowercased in JS search.
-* #814: autodoc: Guard against strange type objects that don't have
-  ``__bases__``.
-* #932: autodoc: Do not crash if ``__doc__`` is not a string.
-* #933: Do not crash if an :rst:role:`option` value is malformed (contains
-  spaces but no option name).
-* #908: On Python 3, handle error messages from LaTeX correctly in the pngmath
-  extension.
-* #943: In autosummary, recognize "first sentences" to pull from the docstring
-  if they contain uppercase letters.
-* #923: Take the entire LaTeX document into account when caching
-  pngmath-generated images. This rebuilds them correctly when
-  :confval:`pngmath_latex_preamble` changes.
-* #901: Emit a warning when using docutils' new "math" markup without a Sphinx
-  math extension active.
-* #845: In code blocks, when the selected lexer fails, display line numbers
-  nevertheless if configured.
-* #929: Support parsed-literal blocks in LaTeX output correctly.
-* #949: Update the tabulary.sty packed with Sphinx.
-* #1050: Add anonymous labels into ``objects.inv`` to be referenced via
-  :mod:`~sphinx.ext.intersphinx`.
-* #1095: Fix print-media stylesheet being included always in the "scrolls"
-  theme.
-* #1085: Fix current classname not getting set if class description has
-  ``:noindex:`` set.
-* #1181: Report option errors in autodoc directives more gracefully.
-* #1155: Fix autodocumenting C-defined methods as attributes in Python 3.
-* #1233: Allow finding both Python classes and exceptions with the "class" and
-  "exc" roles in intersphinx.
-* #1198: Allow "image" for the "figwidth" option of the :rst:dir:`figure`
-  directive as documented by docutils.
-* #1152: Fix pycode parsing errors of Python 3 code by including two grammar
-  versions for Python 2 and 3, and loading the appropriate version for the
-  running Python version.
-* #1017: Be helpful and tell the user when the argument to :rst:dir:`option`
-  does not match the required format.
-* #1345: Fix two bugs with :confval:`nitpick_ignore`; now you don't have to
-  remove the store environment for changes to have effect.
-* #1072: In the JS search, fix issues searching for upper-cased words by
-  lowercasing words before stemming.
-* #1299: Make behavior of the :rst:dir:`math` directive more consistent and
-  avoid producing empty environments in LaTeX output.
-* #1308: Strip HTML tags from the content of "raw" nodes before feeding it
-  to the search indexer.
-* #1249: Fix duplicate LaTeX page numbering for manual documents.
-* #1292: In the linkchecker, retry HEAD requests when denied by HTTP 405.
-  Also make the redirect code apparent and tweak the output a bit to be
-  more obvious.
-* #1285: Avoid name clashes between C domain objects and section titles.
-* #848: Always take the newest code in incremental rebuilds with the
-  :mod:`sphinx.ext.viewcode` extension.
-* #979, #1266: Fix exclude handling in ``sphinx-apidoc``.
-* #1302: Fix regression in :mod:`sphinx.ext.inheritance_diagram` when
-  documenting classes that can't be pickled.
-* #1316: Remove hard-coded ``font-face`` resources from epub theme.
-* #1329: Fix traceback with empty translation msgstr in .po files.
-* #1300: Fix references not working in translated documents in some instances.
-* #1283: Fix a bug in the detection of changed files that would try to access
-  doctrees of deleted documents.
-* #1330: Fix :confval:`exclude_patterns` behavior with subdirectories in the
-  :confval:`html_static_path`.
-* #1323: Fix emitting empty ``<ul>`` tags in the HTML writer, which is not
-  valid HTML.
-* #1147: Don't emit a sidebar search box in the "singlehtml" builder.
-* PR#211: When checking for existence of the :confval:`html_logo` file, check
-  the full relative path and not the basename.
-* #1357: Option names documented by :rst:dir:`option` are now again allowed to
-  not start with a dash or slash, and referencing them will work correctly.
-* #1358: Fix handling of image paths outside of the source directory when using
-  the "wildcard" style reference.
-* #1374: Fix for autosummary generating overly-long summaries if first line
-  doesn't end with a period.
-* #1391: Actually prevent using "pngmath" and "mathjax" extensions at the same
-  time in sphinx-quickstart.
-* #1386: Fix bug preventing more than one theme being added by the entry point
-  mechanism.
-* #1370: Ignore "toctree" nodes in text writer, instead of raising.
-* #1364: Fix 'make gettext' fails when the '.. todolist::' directive is present.
-* #1367: Fix a change of PR#96 that break sphinx.util.docfields.Field.make_field
-  interface/behavior for `item` argument usage.
-* #1363: Fix i18n: missing python domain's cross-references with currentmodule
-  directive or currentclass directive.
-* #1419: Generated i18n sphinx.js files are missing message catalog entries
-  from '.js_t' and '.html'. The issue was introduced in Sphinx 1.1.
-* #636: Keep straight single quotes in literal blocks in the LaTeX build.
 * PR#235: comment db schema of websupport lacked a length of the node_id field.
   Thanks to solos.
 * #1466,PR#241: Fix failure of the cpp domain parser to parse C+11
   "variadic templates" declarations. Thanks to Victor Zverovich.
-* #1459,PR#244: Fix default mathjax js path point to `http://` that cause
+* #1459,PR#244: Fix default mathjax js path point to ``http://`` that cause
   mixed-content error on HTTPS server. Thanks to sbrandtb and robo9k.
 * PR#157: autodoc remove spurious signatures from @property decorated
   attributes. Thanks to David Ham.
 * PR#159: Add coverage targets to quickstart generated Makefile and make.bat.
   Thanks to Matthias Troffaes.
 * #1251: When specifying toctree :numbered: option and :tocdepth: metadata,
-  sub section number that is larger depth than `:tocdepth:` is shrinked.
+  sub section number that is larger depth than ``:tocdepth:`` is shrunk.
 * PR#260: Encode underscore in citation labels for latex export. Thanks to
   Lennart Fricke.
 * PR#264: Fix could not resolve xref for figure node with :name: option.
@@ -206,8 +214,8 @@ Bugs fixed
 * Support for scoped and unscoped enums. Enumerators in unscoped enums
   are injected into the parent scope in addition to the enum scope.
 
-* PR#258, #939: Add dedent option for :rst:dir:`code-block` and
-  :rst:dir:`literal-include`. Thanks to Zafar Siddiqui.
+* PR#258, #939: Add dedent option for `code-block` and
+  `literalinclude`. Thanks to Zafar Siddiqui.
 * PR#268: Fix numbering section does not work at singlehtml mode. It still
   ad-hoc fix because there is a issue that section IDs are conflicted.
   Thanks to Takeshi Komiya.
@@ -215,20 +223,32 @@ Bugs fixed
   Takeshi Komiya.
 * PR#274: Set its URL as a default title value if URL appears in toctree.
   Thanks to Takeshi Komiya.
-* PR#276, #1381: :rst:role:`rfc` and :rst:role:`pep` roles support custom link
+* PR#276, #1381: `rfc` and `pep` roles support custom link
   text. Thanks to Takeshi Komiya.
 * PR#277, #1513: highlights for function pointers in argument list of
-  :rst:dir:`c:function`. Thanks to Takeshi Komiya.
+  `c:function`. Thanks to Takeshi Komiya.
 * PR#278: Fix section entries were shown twice if toctree has been put under
   only directive. Thanks to Takeshi Komiya.
-* #1547: pgen2 tokenizer doesn't recognize `...` literal (Ellipsis for py3).
+* #1547: pgen2 tokenizer doesn't recognize ``...`` literal (Ellipsis for py3).
+* PR#294: On LaTeX builder, wrap float environment on writing literal_block
+  to avoid separation of caption and body. Thanks to Takeshi Komiya.
+* PR#295, #1520: ``make.bat latexpdf`` mechanism to ``cd`` back to the current
+  directory. Thanks to Peter Suter.
+* PR#297, #1571: Add imgpath property to all builders. It make easier to
+  develop builder extensions. Thanks to Takeshi Komiya.
+* #1584: Point to master doc in HTML "top" link.
+* #1585: Autosummary of modules broken in Sphinx-1.2.3.
+* #1610: Sphinx cause AttributeError when MeCab search option is enabled and
+  python-mecab is not installed.
+* #1674: Do not crash if a module's ``__all__`` is not a list of strings.
+* #1673: Fix crashes with :confval:`nitpick_ignore` and ``:doc:`` references.
+* #1686: ifconfig directive doesn't care about default config values.
+* #1642: Fix only one search result appearing in Chrome.
 
 Documentation
 -------------
 
 * Add clarification about the syntax of tags. (:file:`doc/markup/misc.rst`)
-* #1325: Added a "Intersphinx" tutorial section. (:file:`doc/tutorial.rst`)
-* Extended the :ref:`documentation about building extensions <dev-extensions>`.
 
 
 Release 1.2.3 (released Sep 1, 2014)
@@ -237,7 +257,7 @@ Release 1.2.3 (released Sep 1, 2014)
 Features added
 --------------
 
-* #1518: `sphinx-apidoc` command now have a `--version` option to show version
+* #1518: ``sphinx-apidoc`` command now has a ``--version`` option to show version
   information and exit
 * New locales: Hebrew, European Portuguese, Vietnamese.
 
@@ -255,14 +275,14 @@ Bugs fixed
   Thanks to Jorge_C.
 * #1467: Exception on Python3 if nonexistent method is specified by automethod
 * #1441: autosummary can't handle nested classes correctly.
-* #1499: With non-callable `setup` in a conf.py, now sphinx-build emits
-  user-friendly error message.
+* #1499: With non-callable ``setup`` in a conf.py, now sphinx-build emits
+  a user-friendly error message.
 * #1502: In autodoc, fix display of parameter defaults containing backslashes.
 * #1226: autodoc, autosummary: importing setup.py by automodule will invoke
-  setup process and execute `sys.exit()`. Now sphinx avoids SystemExit
+  setup process and execute ``sys.exit()``. Now sphinx avoids SystemExit
   exception and emits warnings without unexpected termination.
 * #1503: py:function directive generate incorrectly signature when specifying
-  a default parameter with an empty list `[]`. Thanks to Geert Jansen.
+  a default parameter with an empty list ``[]``. Thanks to Geert Jansen.
 * #1508: Non-ASCII filename raise exception on make singlehtml, latex, man,
   texinfo and changes.
 * #1531: On Python3 environment, docutils.conf with 'source_link=true' in the
@@ -272,11 +292,11 @@ Bugs fixed
 * PR#281, PR#282, #1509: TODO extension not compatible with websupport. Thanks
   to Takeshi Komiya.
 * #1477: gettext does not extract nodes.line in a table or list.
-* #1544: `make text` generate wrong table when it has empty table cells.
+* #1544: ``make text`` generates wrong table when it has empty table cells.
 * #1522: Footnotes from table get displayed twice in LaTeX. This problem has
   been appeared from Sphinx-1.2.1 by #949.
 * #508: Sphinx every time exit with zero when is invoked from setup.py command.
-  ex. `python setup.py build_sphinx -b doctest` return zero even if doctest
+  ex. ``python setup.py build_sphinx -b doctest`` return zero even if doctest
   failed.
 
 Release 1.2.2 (released Mar 2, 2014)
@@ -285,7 +305,7 @@ Release 1.2.2 (released Mar 2, 2014)
 Bugs fixed
 ----------
 
-* PR#211: When checking for existence of the :confval:`html_logo` file, check
+* PR#211: When checking for existence of the `html_logo` file, check
   the full relative path and not the basename.
 * PR#212: Fix traceback with autodoc and ``__init__`` methods without docstring.
 * PR#213: Fix a missing import in the setup command.
@@ -303,7 +323,7 @@ Bugs fixed
 * #1370: Ignore "toctree" nodes in text writer, instead of raising.
 * #1364: Fix 'make gettext' fails when the '.. todolist::' directive is present.
 * #1367: Fix a change of PR#96 that break sphinx.util.docfields.Field.make_field
-  interface/behavior for `item` argument usage.
+  interface/behavior for ``item`` argument usage.
 
 Documentation
 -------------
@@ -325,7 +345,7 @@ Bugs fixed
   This was caused by a change for #1138.
 * #1340: Can't search alphabetical words on the HTML quick search generated
   with language='ja'.
-* #1319: Do not crash if the :confval:`html_logo` file does not exist.
+* #1319: Do not crash if the `html_logo` file does not exist.
 * #603: Do not use the HTML-ized title for building the search index (that
   resulted in "literal" being found on every page with a literal in the
   title).
@@ -342,7 +362,7 @@ Bugs fixed
   if they contain uppercase letters.
 * #923: Take the entire LaTeX document into account when caching
   pngmath-generated images. This rebuilds them correctly when
-  :confval:`pngmath_latex_preamble` changes.
+  `pngmath_latex_preamble` changes.
 * #901: Emit a warning when using docutils' new "math" markup without a Sphinx
   math extension active.
 * #845: In code blocks, when the selected lexer fails, display line numbers
@@ -359,14 +379,14 @@ Bugs fixed
 * #1155: Fix autodocumenting C-defined methods as attributes in Python 3.
 * #1233: Allow finding both Python classes and exceptions with the "class" and
   "exc" roles in intersphinx.
-* #1198: Allow "image" for the "figwidth" option of the :rst:dir:`figure`
+* #1198: Allow "image" for the "figwidth" option of the :dudir:`figure`
   directive as documented by docutils.
 * #1152: Fix pycode parsing errors of Python 3 code by including two grammar
   versions for Python 2 and 3, and loading the appropriate version for the
   running Python version.
 * #1017: Be helpful and tell the user when the argument to :rst:dir:`option`
   does not match the required format.
-* #1345: Fix two bugs with :confval:`nitpick_ignore`; now you don't have to
+* #1345: Fix two bugs with `nitpick_ignore`; now you don't have to
   remove the store environment for changes to have effect.
 * #1072: In the JS search, fix issues searching for upper-cased words by
   lowercasing words before stemming.
@@ -389,8 +409,8 @@ Bugs fixed
 * #1300: Fix references not working in translated documents in some instances.
 * #1283: Fix a bug in the detection of changed files that would try to access
   doctrees of deleted documents.
-* #1330: Fix :confval:`exclude_patterns` behavior with subdirectories in the
-  :confval:`html_static_path`.
+* #1330: Fix `exclude_patterns` behavior with subdirectories in the
+  `html_static_path`.
 * #1323: Fix emitting empty ``<ul>`` tags in the HTML writer, which is not
   valid HTML.
 * #1147: Don't emit a sidebar search box in the "singlehtml" builder.
@@ -422,7 +442,7 @@ Bugs fixed
 * Restore ``versionmodified`` CSS class for versionadded/changed and deprecated
   directives.
 
-* PR#181: Fix `html_theme_path=['.']` is a trigger of rebuild all documents
+* PR#181: Fix ``html_theme_path = ['.']`` is a trigger of rebuild all documents
   always (This change keeps the current "theme changes cause a rebuild"
   feature).
 
@@ -489,7 +509,7 @@ Features added
 * Support docutils.conf 'writers' and 'html4css1 writer' section in the HTML
   writer. The latex, manpage and texinfo writers also support their respective
   'writers' sections.
-* The new :confval:`html_extra_path` config value allows to specify directories
+* The new `html_extra_path` config value allows to specify directories
   with files that should be copied directly to the HTML output directory.
 * Autodoc directives for module data and attributes now support an
   ``annotation`` option, so that the default display of the data/attribute
@@ -560,10 +580,10 @@ Incompatible changes
 
 * Removed ``sphinx.util.compat.directive_dwim()`` and
   ``sphinx.roles.xfileref_role()`` which were deprecated since version 1.0.
-* PR#122: the files given in :confval:`latex_additional_files` now override TeX
+* PR#122: the files given in `latex_additional_files` now override TeX
   files included by Sphinx, such as ``sphinx.sty``.
-* PR#124: the node generated by :rst:dir:`versionadded`,
-  :rst:dir:`versionchanged` and :rst:dir:`deprecated` directives now includes
+* PR#124: the node generated by `versionadded`,
+  `versionchanged` and `deprecated` directives now includes
   all added markup (such as "New in version X") as child nodes, and no
   additional text must be generated by writers.
 * PR#99: the :rst:dir:`seealso` directive now generates admonition nodes instead
@@ -617,7 +637,7 @@ Features added
     asterisks ("*").
   - The default value for the ``paragraphindent`` has been changed from 2 to 0
     meaning that paragraphs are no longer indented by default.
-  - #1110: A new configuration value :confval:`texinfo_no_detailmenu` has been
+  - #1110: A new configuration value `texinfo_no_detailmenu` has been
     added for controlling whether a ``@detailmenu`` is added in the "Top"
     node's menu.
   - Detailed menus are no longer created except for the "Top" node.
@@ -626,16 +646,16 @@ Features added
 
 * LaTeX builder:
 
-  - PR#115: Add ``'transition'`` item in :confval:`latex_elements` for
+  - PR#115: Add ``'transition'`` item in `latex_elements` for
     customizing how transitions are displayed. Thanks to Jeff Klukas.
   - PR#114: The LaTeX writer now includes the "cmap" package by default. The
-    ``'cmappkg'`` item in :confval:`latex_elements` can be used to control this.
+    ``'cmappkg'`` item in `latex_elements` can be used to control this.
     Thanks to Dmitry Shachnev.
-  - The ``'fontpkg'`` item in :confval:`latex_elements` now defaults to ``''``
-    when the :confval:`language` uses the Cyrillic script. Suggested by Dmitry
+  - The ``'fontpkg'`` item in `latex_elements` now defaults to ``''``
+    when the `language` uses the Cyrillic script. Suggested by Dmitry
     Shachnev.
-  - The :confval:`latex_documents`, :confval:`texinfo_documents`, and
-    :confval:`man_pages` configuration values will be set to default values based
+  - The `latex_documents`, `texinfo_documents`, and
+    `man_pages` configuration values will be set to default values based
     on the :confval:`master_doc` if not explicitly set in :file:`conf.py`.
     Previously, if these values were not set, no output would be generated by
     their respective builders.
@@ -653,13 +673,13 @@ Features added
   - Added the Docutils-native XML and pseudo-XML builders. See
     :class:`XMLBuilder` and :class:`PseudoXMLBuilder`.
   - PR#45: The linkcheck builder now checks ``#anchor``\ s for existence.
-  - PR#123, #1106: Add :confval:`epub_use_index` configuration value. If
-    provided, it will be used instead of :confval:`html_use_index` for epub
+  - PR#123, #1106: Add `epub_use_index` configuration value. If
+    provided, it will be used instead of `html_use_index` for epub
     builder.
-  - PR#126: Add :confval:`epub_tocscope` configuration value. The setting
+  - PR#126: Add `epub_tocscope` configuration value. The setting
     controls the generation of the epub toc. The user can now also include
     hidden toc entries.
-  - PR#112: Add :confval:`epub_show_urls` configuration value.
+  - PR#112: Add `epub_show_urls` configuration value.
 
 * Extensions:
 
@@ -727,7 +747,7 @@ Bugs fixed
 * #1127: Fix traceback when autodoc tries to tokenize a non-Python file.
 * #1126: Fix double-hyphen to en-dash conversion in wrong places such as
   command-line option names in LaTeX.
-* #1123: Allow whitespaces in filenames given to :rst:dir:`literalinclude`.
+* #1123: Allow whitespaces in filenames given to `literalinclude`.
 * #1120: Added improvements about i18n for themes "basic", "haiku" and
   "scrolls" that Sphinx built-in. Thanks to Leonardo J. Caballero G.
 * #1118: Updated Spanish translation. Thanks to Leonardo J. Caballero G.
@@ -735,7 +755,7 @@ Bugs fixed
 * #1112: Avoid duplicate download files when referenced from documents in
   different ways (absolute/relative).
 * #1111: Fix failure to find uppercase words in search when
-  :confval:`html_search_language` is 'ja'. Thanks to Tomo Saito.
+  `html_search_language` is 'ja'. Thanks to Tomo Saito.
 * #1108: The text writer now correctly numbers enumerated lists with
   non-default start values (based on patch by Ewan Edwards).
 * #1102: Support multi-context "with" statements in autodoc.
@@ -800,7 +820,7 @@ Release 1.1.3 (Mar 10, 2012)
 * #860: Do not crash when encountering invalid doctest examples, just
   emit a warning.
 
-* #864: Fix crash with some settings of :confval:`modindex_common_prefix`.
+* #864: Fix crash with some settings of `modindex_common_prefix`.
 
 * #862: Fix handling of ``-D`` and ``-A`` options on Python 3.
 
@@ -864,7 +884,7 @@ Release 1.1 (Oct 9, 2011)
 Incompatible changes
 --------------------
 
-* The :rst:dir:`py:module` directive doesn't output its ``platform`` option
+* The `py:module` directive doesn't output its ``platform`` option
   value anymore. (It was the only thing that the directive did output, and
   therefore quite inconsistent.)
 
@@ -900,7 +920,7 @@ Features added
     :rst:dir:`toctree`\'s ``numbered`` option.
   - #586: Implemented improved :rst:dir:`glossary` markup which allows
     multiple terms per definition.
-  - #478: Added :rst:dir:`py:decorator` directive to describe decorators.
+  - #478: Added `py:decorator` directive to describe decorators.
   - C++ domain now supports array definitions.
   - C++ domain now supports doc fields (``:param x:`` inside directives).
   - Section headings in :rst:dir:`only` directives are now correctly
@@ -911,7 +931,7 @@ Features added
 * HTML builder:
 
   - Added ``pyramid`` theme.
-  - #559: :confval:`html_add_permalinks` is now a string giving the
+  - #559: `html_add_permalinks` is now a string giving the
     text to display in permalinks.
   - #259: HTML table rows now have even/odd CSS classes to enable
     "Zebra styling".
@@ -919,26 +939,26 @@ Features added
 
 * Other builders:
 
-  - #516: Added new value of the :confval:`latex_show_urls` option to
+  - #516: Added new value of the `latex_show_urls` option to
     show the URLs in footnotes.
-  - #209: Added :confval:`text_newlines` and :confval:`text_sectionchars`
+  - #209: Added `text_newlines` and `text_sectionchars`
     config values.
-  - Added :confval:`man_show_urls` config value.
+  - Added `man_show_urls` config value.
   - #472: linkcheck builder: Check links in parallel, use HTTP HEAD
     requests and allow configuring the timeout. New config values:
-    :confval:`linkcheck_timeout` and :confval:`linkcheck_workers`.
-  - #521: Added :confval:`linkcheck_ignore` config value.
+    `linkcheck_timeout` and `linkcheck_workers`.
+  - #521: Added `linkcheck_ignore` config value.
   - #28: Support row/colspans in tables in the LaTeX builder.
 
 * Configuration and extensibility:
 
-  - #537: Added :confval:`nitpick_ignore`.
+  - #537: Added `nitpick_ignore`.
   - #306: Added :event:`env-get-outdated` event.
   - :meth:`.Application.add_stylesheet` now accepts full URIs.
 
 * Autodoc:
 
-  - #564: Add :confval:`autodoc_docstring_signature`. When enabled (the
+  - #564: Add `autodoc_docstring_signature`. When enabled (the
     default), autodoc retrieves the signature from the first line of the
     docstring, if it is found there.
   - #176: Provide ``private-members`` option for autodoc directives.
@@ -956,12 +976,12 @@ Features added
   - Added ``inline`` option to graphviz directives, and fixed the
     default (block-style) in LaTeX output.
   - #590: Added ``caption`` option to graphviz directives.
-  - #553: Added :rst:dir:`testcleanup` blocks in the doctest extension.
-  - #594: :confval:`trim_doctest_flags` now also removes ``<BLANKLINE>``
+  - #553: Added `testcleanup` blocks in the doctest extension.
+  - #594: `trim_doctest_flags` now also removes ``<BLANKLINE>``
     indicators.
   - #367: Added automatic exclusion of hidden members in inheritance
     diagrams, and an option to selectively enable it.
-  - Added :confval:`pngmath_add_tooltips`.
+  - Added `pngmath_add_tooltips`.
   - The math extension displaymath directives now support ``name`` in
     addition to ``label`` for giving the equation label, for compatibility
     with Docutils.
@@ -1034,7 +1054,7 @@ Release 1.0.8 (Sep 23, 2011)
 * #669: Respect the ``noindex`` flag option in py:module directives.
 
 * #675: Fix IndexErrors when including nonexisting lines with
-  :rst:dir:`literalinclude`.
+  `literalinclude`.
 
 * #676: Respect custom function/method parameter separator strings.
 
@@ -1117,7 +1137,7 @@ Release 1.0.6 (Jan 04, 2011)
 * #570: Try decoding ``-D`` and ``-A`` command-line arguments with
   the locale's preferred encoding.
 
-* #528: Observe :confval:`locale_dirs` when looking for the JS
+* #528: Observe `locale_dirs` when looking for the JS
   translations file.
 
 * #574: Add special code for better support of Japanese documents
@@ -1290,51 +1310,51 @@ Features added
 
   - Added a "nitpicky" mode that emits warnings for all missing
     references. It is activated by the :option:`-n` command-line switch
-    or the :confval:`nitpicky` config value.
+    or the `nitpicky` config value.
   - Added ``latexpdf`` target in quickstart Makefile.
 
 * Markup:
 
-  - The :rst:role:`menuselection` and :rst:role:`guilabel` roles now
+  - The `menuselection` and `guilabel` roles now
     support ampersand accelerators.
   - New more compact doc field syntax is now recognized: ``:param type
     name: description``.
-  - Added ``tab-width`` option to :rst:dir:`literalinclude` directive.
+  - Added ``tab-width`` option to `literalinclude` directive.
   - Added ``titlesonly`` option to :rst:dir:`toctree` directive.
   - Added the ``prepend`` and ``append`` options to the
-    :rst:dir:`literalinclude` directive.
+    `literalinclude` directive.
   - #284: All docinfo metadata is now put into the document metadata, not
     just the author.
-  - The :rst:role:`ref` role can now also reference tables by caption.
-  - The :rst:dir:`include` directive now supports absolute paths, which
+  - The `ref` role can now also reference tables by caption.
+  - The :dudir:`include` directive now supports absolute paths, which
     are interpreted as relative to the source directory.
   - In the Python domain, references like ``:func:`.name``` now look for
     matching names with any prefix if no direct match is found.
 
 * Configuration:
 
-  - Added :confval:`rst_prolog` config value.
-  - Added :confval:`html_secnumber_suffix` config value to control
+  - Added `rst_prolog` config value.
+  - Added `html_secnumber_suffix` config value to control
     section numbering format.
-  - Added :confval:`html_compact_lists` config value to control
+  - Added `html_compact_lists` config value to control
     docutils' compact lists feature.
-  - The :confval:`html_sidebars` config value can now contain patterns
+  - The `html_sidebars` config value can now contain patterns
     as keys, and the values can be lists that explicitly select which
     sidebar templates should be rendered. That means that the builtin
     sidebar contents can be included only selectively.
-  - :confval:`html_static_path` can now contain single file entries.
-  - The new universal config value :confval:`exclude_patterns` makes the
-    old :confval:`unused_docs`, :confval:`exclude_trees` and
-    :confval:`exclude_dirnames` obsolete.
-  - Added :confval:`html_output_encoding` config value.
-  - Added the :confval:`latex_docclass` config value and made the
+  - `html_static_path` can now contain single file entries.
+  - The new universal config value `exclude_patterns` makes the
+    old ``unused_docs``, ``exclude_trees`` and
+    ``exclude_dirnames`` obsolete.
+  - Added `html_output_encoding` config value.
+  - Added the `latex_docclass` config value and made the
     "twoside" documentclass option overridable by "oneside".
-  - Added the :confval:`trim_doctest_flags` config value, which is true
+  - Added the `trim_doctest_flags` config value, which is true
     by default.
-  - Added :confval:`html_show_copyright` config value.
-  - Added :confval:`latex_show_pagerefs` and :confval:`latex_show_urls`
+  - Added `html_show_copyright` config value.
+  - Added `latex_show_pagerefs` and `latex_show_urls`
     config values.
-  - The behavior of :confval:`html_file_suffix` changed slightly: the
+  - The behavior of `html_file_suffix` changed slightly: the
     empty string now means "no suffix" instead of "default suffix", use
     ``None`` for "default suffix".
 
@ -1376,7 +1396,7 @@ Features added
|
|||||||
* Extension API:
|
* Extension API:
|
||||||
|
|
||||||
- Added :event:`html-collect-pages`.
|
- Added :event:`html-collect-pages`.
|
||||||
- Added :confval:`needs_sphinx` config value and
|
- Added `needs_sphinx` config value and
|
||||||
:meth:`~sphinx.application.Sphinx.require_sphinx` application API
|
:meth:`~sphinx.application.Sphinx.require_sphinx` application API
|
||||||
method.
|
method.
|
||||||
- #200: Added :meth:`~sphinx.application.Sphinx.add_stylesheet`
|
- #200: Added :meth:`~sphinx.application.Sphinx.add_stylesheet`
|
||||||
@ -1388,7 +1408,7 @@ Features added
|
|||||||
- Added the :mod:`~sphinx.ext.extlinks` extension.
|
- Added the :mod:`~sphinx.ext.extlinks` extension.
|
||||||
- Added support for source ordering of members in autodoc, with
|
- Added support for source ordering of members in autodoc, with
|
||||||
``autodoc_member_order = 'bysource'``.
|
``autodoc_member_order = 'bysource'``.
|
||||||
- Added :confval:`autodoc_default_flags` config value, which can be
|
- Added `autodoc_default_flags` config value, which can be
|
||||||
used to select default flags for all autodoc directives.
|
used to select default flags for all autodoc directives.
|
||||||
- Added a way for intersphinx to refer to named labels in other
|
- Added a way for intersphinx to refer to named labels in other
|
||||||
projects, and to specify the project you want to link to.
|
projects, and to specify the project you want to link to.
|
||||||
@ -1398,7 +1418,7 @@ Features added
|
|||||||
extension, thanks to Pauli Virtanen.
|
extension, thanks to Pauli Virtanen.
|
||||||
- #309: The :mod:`~sphinx.ext.graphviz` extension can now output SVG
|
- #309: The :mod:`~sphinx.ext.graphviz` extension can now output SVG
|
||||||
instead of PNG images, controlled by the
|
instead of PNG images, controlled by the
|
||||||
:confval:`graphviz_output_format` config value.
|
`graphviz_output_format` config value.
|
||||||
- Added ``alt`` option to :rst:dir:`graphviz` extension directives.
|
- Added ``alt`` option to :rst:dir:`graphviz` extension directives.
|
||||||
- Added ``exclude`` argument to :func:`.autodoc.between`.
|
- Added ``exclude`` argument to :func:`.autodoc.between`.
|
||||||
|
|
||||||
@ -1423,5 +1443,5 @@ Previous versions
|
|||||||
=================
|
=================
|
||||||
|
|
||||||
The changelog for versions before 1.0 can be found in the file ``CHANGES.old``
|
The changelog for versions before 1.0 can be found in the file ``CHANGES.old``
|
||||||
in the source distribution or `at BitBucket
|
in the source distribution or `at Github
|
||||||
<https://bitbucket.org/birkenfeld/sphinx/raw/tip/CHANGES.old>`__.
|
<https://github.com/sphinx-doc/sphinx/raw/master/CHANGES.old>`__.
|
||||||
|
8
EXAMPLES
@ -44,6 +44,7 @@ Documentation using the default theme
|
|||||||
* Paste: http://pythonpaste.org/script/
|
* Paste: http://pythonpaste.org/script/
|
||||||
* Paver: http://paver.github.io/paver/
|
* Paver: http://paver.github.io/paver/
|
||||||
* Pioneers and Prominent Men of Utah: http://pioneers.rstebbing.com/
|
* Pioneers and Prominent Men of Utah: http://pioneers.rstebbing.com/
|
||||||
|
* PyCantonese: http://pycantonese.github.io/
|
||||||
* Pyccuracy: https://github.com/heynemann/pyccuracy/wiki/
|
* Pyccuracy: https://github.com/heynemann/pyccuracy/wiki/
|
||||||
* PyCuda: http://documen.tician.de/pycuda/
|
* PyCuda: http://documen.tician.de/pycuda/
|
||||||
* Pyevolve: http://pyevolve.sourceforge.net/
|
* Pyevolve: http://pyevolve.sourceforge.net/
|
||||||
@ -51,6 +52,7 @@ Documentation using the default theme
|
|||||||
* PyMQI: http://pythonhosted.org/pymqi/
|
* PyMQI: http://pythonhosted.org/pymqi/
|
||||||
* PyPubSub: http://pubsub.sourceforge.net/
|
* PyPubSub: http://pubsub.sourceforge.net/
|
||||||
* pyrticle: http://documen.tician.de/pyrticle/
|
* pyrticle: http://documen.tician.de/pyrticle/
|
||||||
|
* pySPACE: http://pyspace.github.io/pyspace/
|
||||||
* Python: http://docs.python.org/
|
* Python: http://docs.python.org/
|
||||||
* python-apt: http://apt.alioth.debian.org/python-apt-doc/
|
* python-apt: http://apt.alioth.debian.org/python-apt-doc/
|
||||||
* PyUblas: http://documen.tician.de/pyublas/
|
* PyUblas: http://documen.tician.de/pyublas/
|
||||||
@ -73,6 +75,7 @@ Documentation using a customized version of the default theme
|
|||||||
* Chaco: http://docs.enthought.com/chaco/
|
* Chaco: http://docs.enthought.com/chaco/
|
||||||
* Chef: http://docs.opscode.com/
|
* Chef: http://docs.opscode.com/
|
||||||
* Djagios: http://djagios.org/
|
* Djagios: http://djagios.org/
|
||||||
|
* EZ-Draw: http://pageperso.lif.univ-mrs.fr/~edouard.thiel/ez-draw/doc/en/html/ez-manual.html
|
||||||
* GetFEM++: http://home.gna.org/getfem/
|
* GetFEM++: http://home.gna.org/getfem/
|
||||||
* Google or-tools:
|
* Google or-tools:
|
||||||
https://or-tools.googlecode.com/svn/trunk/documentation/user_manual/index.html
|
https://or-tools.googlecode.com/svn/trunk/documentation/user_manual/index.html
|
||||||
@ -181,6 +184,8 @@ Documentation using a custom theme/integrated in a site
|
|||||||
* Pylons: http://docs.pylonsproject.org/projects/pylons-webframework/en/latest/
|
* Pylons: http://docs.pylonsproject.org/projects/pylons-webframework/en/latest/
|
||||||
* PyMOTW: http://pymotw.com/2/
|
* PyMOTW: http://pymotw.com/2/
|
||||||
* pypol: http://pypol.altervista.org/ (celery)
|
* pypol: http://pypol.altervista.org/ (celery)
|
||||||
|
* python-aspectlib: http://python-aspectlib.readthedocs.org/en/latest/
|
||||||
|
(`sphinx-py3doc-enhanced-theme`_)
|
||||||
* QGIS: http://qgis.org/en/docs/index.html
|
* QGIS: http://qgis.org/en/docs/index.html
|
||||||
* qooxdoo: http://manual.qooxdoo.org/current/
|
* qooxdoo: http://manual.qooxdoo.org/current/
|
||||||
* Roundup: http://www.roundup-tracker.org/
|
* Roundup: http://www.roundup-tracker.org/
|
||||||
@ -196,6 +201,8 @@ Documentation using a custom theme/integrated in a site
|
|||||||
* Werkzeug: http://werkzeug.pocoo.org/docs/
|
* Werkzeug: http://werkzeug.pocoo.org/docs/
|
||||||
* WFront: http://discorporate.us/projects/WFront/
|
* WFront: http://discorporate.us/projects/WFront/
|
||||||
|
|
||||||
|
.. _sphinx-py3doc-enhanced-theme: https://pypi.python.org/pypi/sphinx_py3doc_enhanced_theme
|
||||||
|
|
||||||
|
|
||||||
Homepages and other non-documentation sites
|
Homepages and other non-documentation sites
|
||||||
-------------------------------------------
|
-------------------------------------------
|
||||||
@ -235,6 +242,7 @@ Books produced using Sphinx
|
|||||||
http://www.oreilly.co.jp/books/9784873116488/
|
http://www.oreilly.co.jp/books/9784873116488/
|
||||||
* "LassoGuide":
|
* "LassoGuide":
|
||||||
http://www.lassosoft.com/Lasso-Documentation
|
http://www.lassosoft.com/Lasso-Documentation
|
||||||
|
* "Software-Dokumentation mit Sphinx": http://www.amazon.de/dp/1497448689/
|
||||||
|
|
||||||
|
|
||||||
Thesis using Sphinx
|
Thesis using Sphinx
|
||||||
|
2
LICENSE
@ -1,7 +1,7 @@
|
|||||||
License for Sphinx
|
License for Sphinx
|
||||||
==================
|
==================
|
||||||
|
|
||||||
Copyright (c) 2007-2013 by the Sphinx team (see AUTHORS file).
|
Copyright (c) 2007-2015 by the Sphinx team (see AUTHORS file).
|
||||||
All rights reserved.
|
All rights reserved.
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
21
Makefile
@ -7,7 +7,22 @@ DONT_CHECK = -i build -i dist -i sphinx/style/jquery.js \
|
|||||||
-i sphinx/pycode/pgen2 -i sphinx/util/smartypants.py \
|
-i sphinx/pycode/pgen2 -i sphinx/util/smartypants.py \
|
||||||
-i .ropeproject -i doc/_build -i tests/path.py \
|
-i .ropeproject -i doc/_build -i tests/path.py \
|
||||||
-i tests/coverage.py -i env -i utils/convert.py \
|
-i tests/coverage.py -i env -i utils/convert.py \
|
||||||
-i sphinx/search/ja.py \
|
-i sphinx/search/da.py \
|
||||||
|
-i sphinx/search/de.py \
|
||||||
|
-i sphinx/search/en.py \
|
||||||
|
-i sphinx/search/es.py \
|
||||||
|
-i sphinx/search/fi.py \
|
||||||
|
-i sphinx/search/fr.py \
|
||||||
|
-i sphinx/search/hu.py \
|
||||||
|
-i sphinx/search/it.py \
|
||||||
|
-i sphinx/search/ja.py \
|
||||||
|
-i sphinx/search/nl.py \
|
||||||
|
-i sphinx/search/no.py \
|
||||||
|
-i sphinx/search/pt.py \
|
||||||
|
-i sphinx/search/ro.py \
|
||||||
|
-i sphinx/search/ru.py \
|
||||||
|
-i sphinx/search/sv.py \
|
||||||
|
-i sphinx/search/tr.py \
|
||||||
-i utils/reindent3.py -i utils/check_sources3.py -i .tox
|
-i utils/reindent3.py -i utils/check_sources3.py -i .tox
|
||||||
|
|
||||||
all: clean-pyc clean-backupfiles check test
|
all: clean-pyc clean-backupfiles check test
|
||||||
@ -48,10 +63,10 @@ reindent:
|
|||||||
@$(PYTHON) utils/reindent.py -r -n .
|
@$(PYTHON) utils/reindent.py -r -n .
|
||||||
endif
|
endif
|
||||||
|
|
||||||
test: build
|
test:
|
||||||
@cd tests; $(PYTHON) run.py -d -m '^[tT]est' $(TEST)
|
@cd tests; $(PYTHON) run.py -d -m '^[tT]est' $(TEST)
|
||||||
|
|
||||||
covertest: build
|
covertest:
|
||||||
@cd tests; $(PYTHON) run.py -d -m '^[tT]est' --with-coverage \
|
@cd tests; $(PYTHON) run.py -d -m '^[tT]est' --with-coverage \
|
||||||
--cover-package=sphinx $(TEST)
|
--cover-package=sphinx $(TEST)
|
||||||
|
|
||||||
|
21
README.rst
@ -2,6 +2,9 @@
|
|||||||
README for Sphinx
|
README for Sphinx
|
||||||
=================
|
=================
|
||||||
|
|
||||||
|
This is the Sphinx documentation generator, see http://sphinx-doc.org/.
|
||||||
|
|
||||||
|
|
||||||
Installing
|
Installing
|
||||||
==========
|
==========
|
||||||
|
|
||||||
@ -17,7 +20,7 @@ Reading the docs
|
|||||||
After installing::
|
After installing::
|
||||||
|
|
||||||
cd doc
|
cd doc
|
||||||
sphinx-build . _build/html
|
make html
|
||||||
|
|
||||||
Then, direct your browser to ``_build/html/index.html``.
|
Then, direct your browser to ``_build/html/index.html``.
|
||||||
|
|
||||||
@ -35,21 +38,25 @@ If you want to use a different interpreter, e.g. ``python3``, use::
|
|||||||
|
|
||||||
PYTHON=python3 make test
|
PYTHON=python3 make test
|
||||||
|
|
||||||
|
Continuous testing runs on Travis CI:
|
||||||
|
|
||||||
|
.. image:: https://travis-ci.org/sphinx-doc/sphinx.svg?branch=master
|
||||||
|
:target: https://travis-ci.org/sphinx-doc/sphinx
|
||||||
|
|
||||||
|
|
||||||
Contributing
|
Contributing
|
||||||
============
|
============
|
||||||
|
|
||||||
#. Check for open issues or open a fresh issue to start a discussion around a
|
#. Check for open issues or open a fresh issue to start a discussion around a
|
||||||
feature idea or a bug. There are Non Assigned issues:
|
feature idea or a bug.
|
||||||
https://bitbucket.org/birkenfeld/sphinx/issues?status=new&status=open&responsible=
|
|
||||||
#. If you feel uncomfortable or uncertain about an issue or your changes, feel
|
#. If you feel uncomfortable or uncertain about an issue or your changes, feel
|
||||||
free to email sphinx-dev@googlegroups.com.
|
free to email sphinx-dev@googlegroups.com.
|
||||||
#. Fork the repository on Bitbucket https://bitbucket.org/birkenfeld/sphinx
|
#. Fork the repository on GitHub https://github.com/sphinx-doc/sphinx
|
||||||
to start making your changes to the **default** branch for next major
|
to start making your changes to the **master** branch for next major
|
||||||
version, or **stable** branch for next minor version.
|
version, or **stable** branch for next minor version.
|
||||||
#. Write a test which shows that the bug was fixed or that the feature works
|
#. Write a test which shows that the bug was fixed or that the feature works
|
||||||
as expected.
|
as expected.
|
||||||
#. Send a pull request and bug the maintainer until it gets merged and
|
#. Send a pull request and bug the maintainer until it gets merged and
|
||||||
published. Make sure to add yourself to AUTHORS
|
published. Make sure to add yourself to AUTHORS
|
||||||
<https://bitbucket.org/birkenfeld/sphinx/src/tip/AUTHORS> and the change to
|
<https://github.com/sphinx-doc/sphinx/blob/master/AUTHORS> and the change to
|
||||||
CHANGES <https://bitbucket.org/birkenfeld/sphinx/src/tip/CHANGES>.
|
CHANGES <https://github.com/sphinx-doc/sphinx/blob/master/CHANGES>.
|
||||||
|
5
TODO
@ -1,5 +0,0 @@
|
|||||||
Sphinx TODO
|
|
||||||
===========
|
|
||||||
|
|
||||||
All todo items are now tracked as issues in the Sphinx issue tracker at
|
|
||||||
<http://www.bitbucket.org/birkenfeld/sphinx/issues/>.
|
|
3
doc/_templates/index.html
vendored
@ -34,6 +34,9 @@
|
|||||||
<li>{%trans path=pathto('extensions')%}<b>Extensions:</b> automatic testing of code snippets, inclusion of
|
<li>{%trans path=pathto('extensions')%}<b>Extensions:</b> automatic testing of code snippets, inclusion of
|
||||||
docstrings from Python modules (API docs), and
|
docstrings from Python modules (API docs), and
|
||||||
<a href="{{ path }}#builtin-sphinx-extensions">more</a>{%endtrans%}</li>
|
<a href="{{ path }}#builtin-sphinx-extensions">more</a>{%endtrans%}</li>
|
||||||
|
<li>{%trans path=pathto('develop')%}<b>Contributed extensions:</b> more than
|
||||||
|
50 extensions <a href="{{ path }}#extensions">contributed by users</a>
|
||||||
|
in a second repository; most of them installable from PyPI{%endtrans%}</li>
|
||||||
</ul>
|
</ul>
|
||||||
<p>{%trans%}
|
<p>{%trans%}
|
||||||
Sphinx uses <a href="http://docutils.sf.net/rst.html">reStructuredText</a>
|
Sphinx uses <a href="http://docutils.sf.net/rst.html">reStructuredText</a>
|
||||||
|
8
doc/_templates/indexsidebar.html
vendored
@ -3,18 +3,18 @@
|
|||||||
{%trans%}project{%endtrans%}</p>
|
{%trans%}project{%endtrans%}</p>
|
||||||
|
|
||||||
<h3>Download</h3>
|
<h3>Download</h3>
|
||||||
{% if version.endswith('(hg)') %}
|
{% if version.endswith('a0') %}
|
||||||
<p>{%trans%}This documentation is for version <b>{{ version }}</b>, which is
|
<p>{%trans%}This documentation is for version <b>{{ version }}</b>, which is
|
||||||
not released yet.{%endtrans%}</p>
|
not released yet.{%endtrans%}</p>
|
||||||
<p>{%trans%}You can use it from the
|
<p>{%trans%}You can use it from the
|
||||||
<a href="http://bitbucket.org/birkenfeld/sphinx/">Mercurial repo</a> or look for
|
<a href="https://github.com/sphinx-doc/sphinx/">Git repo</a> or look for
|
||||||
released versions in the <a href="http://pypi.python.org/pypi/Sphinx">Python
|
released versions in the <a href="http://pypi.python.org/pypi/Sphinx">Python
|
||||||
Package Index</a>.{%endtrans%}</p>
|
Package Index</a>.{%endtrans%}</p>
|
||||||
{% else %}
|
{% else %}
|
||||||
<p>{%trans%}Current version: <b>{{ version }}</b>{%endtrans%}</p>
|
<p>{%trans%}Current version: <b>{{ version }}</b>{%endtrans%}</p>
|
||||||
<p>{%trans%}Get Sphinx from the <a href="http://pypi.python.org/pypi/Sphinx">Python Package
|
<p>{%trans%}Get Sphinx from the <a href="http://pypi.python.org/pypi/Sphinx">Python Package
|
||||||
Index</a>, or install it with:{%endtrans%}</p>
|
Index</a>, or install it with:{%endtrans%}</p>
|
||||||
<pre>easy_install -U Sphinx</pre>
|
<pre>pip install -U Sphinx</pre>
|
||||||
<p>{%trans%}Latest <a href="http://sphinx-doc.org/latest/">development version docs</a>
|
<p>{%trans%}Latest <a href="http://sphinx-doc.org/latest/">development version docs</a>
|
||||||
are also available.{%endtrans%}</p>
|
are also available.{%endtrans%}</p>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
@ -30,4 +30,4 @@ are also available.{%endtrans%}</p>
|
|||||||
</form>
|
</form>
|
||||||
<p>{%trans%}or come to the <tt>#sphinx-doc</tt> channel on FreeNode.{%endtrans%}</p>
|
<p>{%trans%}or come to the <tt>#sphinx-doc</tt> channel on FreeNode.{%endtrans%}</p>
|
||||||
<p>{%trans%}You can also open an issue at the
|
<p>{%trans%}You can also open an issue at the
|
||||||
<a href="http://www.bitbucket.org/birkenfeld/sphinx/issues/">tracker</a>.{%endtrans%}</p>
|
<a href="https://github.com/sphinx-doc/sphinx/issues">tracker</a>.{%endtrans%}</p>
|
||||||
|
2
doc/_themes/sphinx13/layout.html
vendored
@ -4,7 +4,7 @@
|
|||||||
|
|
||||||
Sphinx layout template for the sphinxdoc theme.
|
Sphinx layout template for the sphinxdoc theme.
|
||||||
|
|
||||||
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
|
:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
|
||||||
:license: BSD, see LICENSE for details.
|
:license: BSD, see LICENSE for details.
|
||||||
#}
|
#}
|
||||||
{%- extends "basic/layout.html" %}
|
{%- extends "basic/layout.html" %}
|
||||||
|
2
doc/_themes/sphinx13/static/sphinx13.css
vendored
@ -4,7 +4,7 @@
|
|||||||
*
|
*
|
||||||
* Sphinx stylesheet -- sphinx13 theme.
|
* Sphinx stylesheet -- sphinx13 theme.
|
||||||
*
|
*
|
||||||
* :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
|
* :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
|
||||||
* :license: BSD, see LICENSE for details.
|
* :license: BSD, see LICENSE for details.
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
:tocdepth: 2
|
:tocdepth: 2
|
||||||
|
|
||||||
|
.. default-role:: any
|
||||||
|
|
||||||
.. _changes:
|
.. _changes:
|
||||||
|
|
||||||
Changes in Sphinx
|
Changes in Sphinx
|
||||||
|
@ -7,14 +7,15 @@ import sphinx
|
|||||||
|
|
||||||
|
|
||||||
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
|
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
|
||||||
'sphinx.ext.autosummary', 'sphinx.ext.extlinks']
|
'sphinx.ext.autosummary', 'sphinx.ext.extlinks',
|
||||||
|
'sphinx.ext.viewcode']
|
||||||
|
|
||||||
master_doc = 'contents'
|
master_doc = 'contents'
|
||||||
templates_path = ['_templates']
|
templates_path = ['_templates']
|
||||||
exclude_patterns = ['_build']
|
exclude_patterns = ['_build']
|
||||||
|
|
||||||
project = 'Sphinx'
|
project = 'Sphinx'
|
||||||
copyright = '2007-2014, Georg Brandl and the Sphinx team'
|
copyright = '2007-2015, Georg Brandl and the Sphinx team'
|
||||||
version = sphinx.__released__
|
version = sphinx.__released__
|
||||||
release = version
|
release = version
|
||||||
show_authors = True
|
show_authors = True
|
||||||
@ -83,7 +84,7 @@ texinfo_documents = [
|
|||||||
|
|
||||||
# We're not using intersphinx right now, but if we did, this would be part of
|
# We're not using intersphinx right now, but if we did, this would be part of
|
||||||
# the mapping:
|
# the mapping:
|
||||||
intersphinx_mapping = {'python': ('http://docs.python.org/dev', None)}
|
intersphinx_mapping = {'python': ('http://docs.python.org/2/', None)}
|
||||||
|
|
||||||
# Sphinx document translation with sphinx gettext feature uses these settings:
|
# Sphinx document translation with sphinx gettext feature uses these settings:
|
||||||
locale_dirs = ['locale/']
|
locale_dirs = ['locale/']
|
||||||
|
@ -231,6 +231,30 @@ General configuration
|
|||||||
|
|
||||||
.. versionadded:: 1.1
|
.. versionadded:: 1.1
|
||||||
|
|
||||||
|
.. confval:: numfig
|
||||||
|
|
||||||
|
If true, figures, tables and code-blocks are automatically numbered if they
|
||||||
|
have a caption. For now, it works only with the HTML builder. Default is ``False``.
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
|
|
||||||
|
.. confval:: numfig_format
|
||||||
|
|
||||||
|
A dictionary mapping ``'figure'``, ``'table'`` and ``'code-block'`` to
|
||||||
|
strings that are used as the format for figure numbers. Default is to use
|
||||||
|
``'Fig. %s'`` for ``'figure'``, ``'Table %s'`` for ``'table'`` and
|
||||||
|
``'Listing %s'`` for ``'code-block'``.
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
|
|
||||||
|
.. confval:: numfig_secnum_depth
|
||||||
|
|
||||||
|
The scope of figure numbers, that is, the level at which figure numbering
|
||||||
|
restarts. ``0`` means "whole document". ``1`` means "per section", so Sphinx
|
||||||
|
numbers figures x.1, x.2, x.3, ... ``2`` means "per subsection", so Sphinx
|
||||||
|
numbers them x.x.1, x.x.2, x.x.3, ..., and so on. Default is ``1``.
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
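A minimal ``conf.py`` sketch combining the three numbering options described
above; the format strings are illustrative, not requirements:

.. code-block:: python

   # conf.py -- numbering of captioned figures, tables and code-blocks
   numfig = True
   numfig_format = {
       'figure': 'Fig. %s',
       'table': 'Table %s',
       'code-block': 'Listing %s',
   }
   numfig_secnum_depth = 1   # per-section counters: x.1, x.2, x.3, ...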
|
|
||||||
Project information
|
Project information
|
||||||
-------------------
|
-------------------
|
||||||
@ -279,10 +303,19 @@ Project information
|
|||||||
|
|
||||||
.. versionadded:: 0.5
|
.. versionadded:: 0.5
|
||||||
|
|
||||||
|
.. confval:: highlight_options
|
||||||
|
|
||||||
|
A dictionary of options that modify how the lexer specified by
|
||||||
|
:confval:`highlight_language` generates highlighted source code. These are
|
||||||
|
lexer-specific; for the options understood by each, see the
|
||||||
|
`Pygments documentation <http://pygments.org/docs/lexers/>`_.
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
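For example, a ``conf.py`` sketch passing a lexer-specific option to Pygments
(``startinline`` is an option of the PHP lexer, shown here purely as an
illustration):

.. code-block:: python

   # conf.py -- highlight PHP snippets without requiring a leading <?php tag
   highlight_language = 'php'
   highlight_options = {'startinline': True}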
|
|
||||||
.. confval:: pygments_style
|
.. confval:: pygments_style
|
||||||
|
|
||||||
The style name to use for Pygments highlighting of source code. The default
|
The style name to use for Pygments highlighting of source code. If not set,
|
||||||
style is selected by the theme for HTML output, and ``'sphinx'`` otherwise.
|
either the theme's default style or ``'sphinx'`` is selected for HTML output.
|
||||||
|
|
||||||
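For example, either of these would work in ``conf.py`` (the dotted path in the
second line is a hypothetical custom style class, not a real package):

.. code-block:: python

   pygments_style = 'sphinx'
   # pygments_style = 'mytheme.styles.MyStyle'   # fully-qualified custom style class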
.. versionchanged:: 0.3
|
.. versionchanged:: 0.3
|
||||||
If the value is a fully-qualified name of a custom Pygments style class,
|
If the value is a fully-qualified name of a custom Pygments style class,
|
||||||
@ -424,9 +457,17 @@ documentation on :ref:`intl` for details.
|
|||||||
.. confval:: gettext_uuid
|
.. confval:: gettext_uuid
|
||||||
|
|
||||||
If true, Sphinx generates uuid information for version tracking in message
|
If true, Sphinx generates uuid information for version tracking in message
|
||||||
catalogs.
|
catalogs. It is used for:
|
||||||
|
|
||||||
The default is ``True``.
|
* Add a uid line for each msgid in .pot files.
|
||||||
|
* Calculate the similarity between new msgids and previously saved old msgids.
|
||||||
|
This calculation takes a long time.
|
||||||
|
|
||||||
|
If you want to accelerate the calculation, you can install the
|
||||||
|
``python-levenshtein`` third-party package (written in C) with
|
||||||
|
:command:`pip install python-levenshtein`.
|
||||||
|
|
||||||
|
The default is ``False``.
|
||||||
|
|
||||||
.. versionadded:: 1.3
|
.. versionadded:: 1.3
|
||||||
|
|
||||||
@ -447,6 +488,16 @@ documentation on :ref:`intl` for details.
|
|||||||
|
|
||||||
.. versionadded:: 1.3
|
.. versionadded:: 1.3
|
||||||
|
|
||||||
|
.. confval:: gettext_enables
|
||||||
|
|
||||||
|
Specifies the element names for which gettext extraction and translation
|
||||||
|
application are enabled for i18n. The following names can be specified:
|
||||||
|
|
||||||
|
:index: index terms
|
||||||
|
|
||||||
|
The default is ``[]``.
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
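A small ``conf.py`` sketch combining the i18n-related values discussed above
(the directory name is only an example):

.. code-block:: python

   # conf.py -- message catalog handling
   locale_dirs = ['locale/']        # where translated catalogs are searched
   gettext_uuid = True              # add uid lines to .pot files (slower builds)
   gettext_enables = ['index']      # also extract and translate index terms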
|
|
||||||
.. _html-options:
|
.. _html-options:
|
||||||
|
|
||||||
@ -707,7 +758,7 @@ that use Sphinx's HTMLWriter class.
|
|||||||
|
|
||||||
.. confval:: html_use_opensearch
|
.. confval:: html_use_opensearch
|
||||||
|
|
||||||
If nonempty, an `OpenSearch <http://opensearch.org>` description file will be
|
If nonempty, an `OpenSearch <http://opensearch.org>`_ description file will be
|
||||||
output, and all pages will contain a ``<link>`` tag referring to it. Since
|
output, and all pages will contain a ``<link>`` tag referring to it. Since
|
||||||
OpenSearch doesn't support relative URLs for its search page location, the
|
OpenSearch doesn't support relative URLs for its search page location, the
|
||||||
value of this option must be the base URL from which these documents are
|
value of this option must be the base URL from which these documents are
|
||||||
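A ``conf.py`` sketch (the URL is a placeholder for wherever the built HTML is
actually served from):

.. code-block:: python

   html_use_opensearch = 'https://docs.example.org/en/latest'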
@ -1003,9 +1054,9 @@ the `Dublin Core metadata <http://dublincore.org/>`_.
|
|||||||
|
|
||||||
This flag determines if sphinx should try to fix image formats that are not
|
This flag determines if sphinx should try to fix image formats that are not
|
||||||
supported by some epub readers. At the moment palette images with a small
|
supported by some epub readers. At the moment palette images with a small
|
||||||
color table are upgraded. You need the Python Image Library (PIL) installed
|
color table are upgraded. You need the Python Imaging Library (Pillow, the
|
||||||
to use this option. The default value is ``False`` because the automatic
|
successor of PIL) installed to use this option. The default value is
|
||||||
conversion may lose information.
|
``False`` because the automatic conversion may lose information.
|
||||||
|
|
||||||
.. versionadded:: 1.2
|
.. versionadded:: 1.2
|
||||||
|
|
||||||
@ -1014,7 +1065,7 @@ the `Dublin Core metadata <http://dublincore.org/>`_.
|
|||||||
This option specifies the maximum width of images. If it is set to a value
|
This option specifies the maximum width of images. If it is set to a value
|
||||||
greater than zero, images with a width larger than the given value are
|
greater than zero, images with a width larger than the given value are
|
||||||
scaled accordingly. If it is zero, no scaling is performed. The default
|
scaled accordingly. If it is zero, no scaling is performed. The default
|
||||||
value is ``0``. You need the Python Image Library (PIL) installed to use
|
value is ``0``. You need the Python Imaging Library (Pillow) installed to use
|
||||||
this option.
|
this option.
|
||||||
|
|
||||||
.. versionadded:: 1.2
|
.. versionadded:: 1.2
|
||||||
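A ``conf.py`` sketch for these two epub image options; the option names
``epub_fix_images`` and ``epub_max_image_width`` are assumed here, since the
hunks above show only their descriptions, and both require Pillow:

.. code-block:: python

   epub_fix_images = True        # convert images with small palettes
   epub_max_image_width = 600    # scale down images wider than 600 pixels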
|
@ -5,10 +5,10 @@ Sphinx development
|
|||||||
|
|
||||||
Sphinx is maintained by a group of volunteers. We value every contribution!
|
Sphinx is maintained by a group of volunteers. We value every contribution!
|
||||||
|
|
||||||
* The code can be found in a Mercurial repository, at
|
* The code can be found in a Git repository, at
|
||||||
https://bitbucket.org/birkenfeld/sphinx/.
|
https://github.com/sphinx-doc/sphinx/.
|
||||||
* Issues and feature requests should be raised in the `tracker
|
* Issues and feature requests should be raised in the `tracker
|
||||||
<https://bitbucket.org/birkenfeld/sphinx/issues/>`_.
|
<https://github.com/sphinx-doc/sphinx/issues>`_.
|
||||||
* The mailing list for development is at `Google Groups
|
* The mailing list for development is at `Google Groups
|
||||||
<https://groups.google.com/group/sphinx-dev/>`_.
|
<https://groups.google.com/group/sphinx-dev/>`_.
|
||||||
* There is also the #sphinx-doc IRC channel on `freenode
|
* There is also the #sphinx-doc IRC channel on `freenode
|
||||||
@ -55,6 +55,7 @@ This is the current list of contributed extensions in that repository:
|
|||||||
- hyphenator: client-side hyphenation of HTML using hyphenator_
|
- hyphenator: client-side hyphenation of HTML using hyphenator_
|
||||||
- inlinesyntaxhighlight_: inline syntax highlighting
|
- inlinesyntaxhighlight_: inline syntax highlighting
|
||||||
- lassodomain: a domain for documenting Lasso_ source code
|
- lassodomain: a domain for documenting Lasso_ source code
|
||||||
|
- libreoffice: an extension to include any drawing supported by LibreOffice (e.g. odg, vsd...).
|
||||||
- lilypond: an extension inserting music scripts from Lilypond_ in PNG format.
|
- lilypond: an extension inserting music scripts from Lilypond_ in PNG format.
|
||||||
- makedomain_: a domain for `GNU Make`_
|
- makedomain_: a domain for `GNU Make`_
|
||||||
- matlabdomain: document MATLAB_ code.
|
- matlabdomain: document MATLAB_ code.
|
||||||
|
@ -7,10 +7,9 @@ Sphinx Developer's Guide
|
|||||||
system used by developers to document systems used by other developers to
|
system used by developers to document systems used by other developers to
|
||||||
develop other systems that may also be documented using Sphinx.
|
develop other systems that may also be documented using Sphinx.
|
||||||
|
|
||||||
The Sphinx source code is managed using `Mercurial`_ and is hosted on
|
The Sphinx source code is managed using Git and is hosted on Github.
|
||||||
`BitBucket`_.
|
|
||||||
|
|
||||||
hg clone https://bitbucket.org/birkenfeld/sphinx
|
git clone git://github.com/sphinx-doc/sphinx
|
||||||
|
|
||||||
.. rubric:: Community
|
.. rubric:: Community
|
||||||
|
|
||||||
@ -23,15 +22,12 @@ sphinx-dev <sphinx-dev@googlegroups.com>
|
|||||||
#sphinx-doc on irc.freenode.net
|
#sphinx-doc on irc.freenode.net
|
||||||
IRC channel for development questions and user support.
|
IRC channel for development questions and user support.
|
||||||
|
|
||||||
.. _`BitBucket`: https://bitbucket.org/
|
|
||||||
.. _`Mercurial`: http://mercurial.selenic.com/
|
|
||||||
|
|
||||||
|
|
||||||
Bug Reports and Feature Requests
|
Bug Reports and Feature Requests
|
||||||
--------------------------------
|
--------------------------------
|
||||||
|
|
||||||
If you have encountered a problem with Sphinx or have an idea for a new
|
If you have encountered a problem with Sphinx or have an idea for a new
|
||||||
feature, please submit it to the `issue tracker`_ on BitBucket or discuss it
|
feature, please submit it to the `issue tracker`_ on Github or discuss it
|
||||||
on the sphinx-dev mailing list.
|
on the sphinx-dev mailing list.
|
||||||
|
|
||||||
For bug reports, please include the output produced during the build process
|
For bug reports, please include the output produced during the build process
|
||||||
@ -43,22 +39,22 @@ Including or providing a link to the source files involved may help us fix the
|
|||||||
issue. If possible, try to create a minimal project that produces the error
|
issue. If possible, try to create a minimal project that produces the error
|
||||||
and post that instead.
|
and post that instead.
|
||||||
|
|
||||||
.. _`issue tracker`: https://bitbucket.org/birkenfeld/sphinx/issues
|
.. _`issue tracker`: https://github.com/sphinx-doc/sphinx/issues
|
||||||
|
|
||||||
|
|
||||||
Contributing to Sphinx
|
Contributing to Sphinx
|
||||||
----------------------
|
----------------------
|
||||||
|
|
||||||
The recommended way for new contributors to submit code to Sphinx is to fork
|
The recommended way for new contributors to submit code to Sphinx is to fork
|
||||||
the Mercurial repository on BitBucket and then submit a pull request after
|
the repository on Github and then submit a pull request after
|
||||||
committing the changes. The pull request will then need to be approved by one
|
committing the changes. The pull request will then need to be approved by one
|
||||||
of the core developers before it is merged into the main repository.
|
of the core developers before it is merged into the main repository.
|
||||||
|
|
||||||
#. Check for open issues or open a fresh issue to start a discussion around a
|
#. Check for open issues or open a fresh issue to start a discussion around a
|
||||||
feature idea or a bug. There are `Non Assigned`_ issues.
|
feature idea or a bug.
|
||||||
#. If you feel uncomfortable or uncertain about an issue or your changes, feel
|
#. If you feel uncomfortable or uncertain about an issue or your changes, feel
|
||||||
free to email sphinx-dev@googlegroups.com.
|
free to email sphinx-dev@googlegroups.com.
|
||||||
#. Fork `the repository`_ on Bitbucket to start making your changes to the
|
#. Fork `the repository`_ on Github to start making your changes to the
|
||||||
**default** branch for next major version, or **stable** branch for next
|
**default** branch for next major version, or **stable** branch for next
|
||||||
minor version.
|
minor version.
|
||||||
#. Write a test which shows that the bug was fixed or that the feature works
|
#. Write a test which shows that the bug was fixed or that the feature works
|
||||||
@ -67,35 +63,35 @@ of the core developers before it is merged into the main repository.
|
|||||||
published. Make sure to add yourself to AUTHORS_ and the change to
|
published. Make sure to add yourself to AUTHORS_ and the change to
|
||||||
CHANGES_.
|
CHANGES_.
|
||||||
|
|
||||||
.. _`the repository`: https://bitbucket.org/birkenfeld/sphinx
|
.. _`the repository`: https://github.com/sphinx-doc/sphinx
|
||||||
.. _AUTHORS: https://bitbucket.org/birkenfeld/sphinx/src/tip/AUTHORS
|
.. _AUTHORS: https://github.com/sphinx-doc/sphinx/blob/master/AUTHORS
|
||||||
.. _CHANGES: https://bitbucket.org/birkenfeld/sphinx/src/tip/CHANGES
|
.. _CHANGES: https://github.com/sphinx-doc/sphinx/blob/master/CHANGES
|
||||||
.. _Non Assigned: https://bitbucket.org/birkenfeld/sphinx/issues?status=new&status=open&responsible=
|
|
||||||
|
|
||||||
Getting Started
|
Getting Started
|
||||||
~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
These are the basic steps needed to start developing on Sphinx.
|
These are the basic steps needed to start developing on Sphinx.
|
||||||
|
|
||||||
#. Create an account on BitBucket.
|
#. Create an account on Github.
|
||||||
|
|
||||||
#. Fork the main Sphinx repository (`birkenfeld/sphinx
|
#. Fork the main Sphinx repository (`sphinx-doc/sphinx
|
||||||
<https://bitbucket.org/birkenfeld/sphinx>`_) using the BitBucket interface.
|
<https://github.com/sphinx-doc/sphinx>`_) using the Github interface.
|
||||||
|
|
||||||
#. Clone the forked repository to your machine. ::
|
#. Clone the forked repository to your machine. ::
|
||||||
|
|
||||||
hg clone https://bitbucket.org/USERNAME/sphinx-fork
|
git clone https://github.com/USERNAME/sphinx
|
||||||
cd sphinx-fork
|
cd sphinx
|
||||||
|
|
||||||
#. Checkout the appropriate branch.
|
#. Checkout the appropriate branch.
|
||||||
|
|
||||||
For changes that should be included in the next minor release (namely bug
|
For changes that should be included in the next minor release (namely bug
|
||||||
fixes), use the ``stable`` branch. ::
|
fixes), use the ``stable`` branch. ::
|
||||||
|
|
||||||
hg checkout stable
|
git checkout stable
|
||||||
|
|
||||||
For new features or other substantial changes that should wait until the
|
For new features or other substantial changes that should wait until the
|
||||||
next major release, use the ``default`` branch.
|
next major release, use the ``master`` branch.
|
||||||
|
|
||||||
#. Optional: setup a virtual environment. ::
|
#. Optional: setup a virtual environment. ::
|
||||||
|
|
||||||
@ -103,6 +99,10 @@ These are the basic steps needed to start developing on Sphinx.
|
|||||||
. ~/sphinxenv/bin/activate
|
. ~/sphinxenv/bin/activate
|
||||||
pip install -e .
|
pip install -e .
|
||||||
|
|
||||||
|
#. Create a new working branch. Choose any name you like. ::
|
||||||
|
|
||||||
|
git checkout -b feature-xyz
|
||||||
|
|
||||||
#. Hack, hack, hack.
|
#. Hack, hack, hack.
|
||||||
|
|
||||||
For tips on working with the code, see the `Coding Guide`_.
|
For tips on working with the code, see the `Coding Guide`_.
|
||||||
@ -130,28 +130,31 @@ These are the basic steps needed to start developing on Sphinx.
|
|||||||
* For bug fixes, first add a test that fails without your changes and passes
|
* For bug fixes, first add a test that fails without your changes and passes
|
||||||
after they are applied.
|
after they are applied.
|
||||||
|
|
||||||
|
* Tests that need a sphinx-build run should be integrated in one of the
|
||||||
|
existing test modules if possible. New tests that use ``@with_app`` and
|
||||||
|
then ``build_all`` for a few assertions are not good since *the test suite
|
||||||
|
should not take more than a minute to run*.
|
||||||
|
|
||||||
#. Please add a bullet point to :file:`CHANGES` if the fix or feature is not
|
#. Please add a bullet point to :file:`CHANGES` if the fix or feature is not
|
||||||
trivial (small doc updates, typo fixes). Then commit::
|
trivial (small doc updates, typo fixes). Then commit::
|
||||||
|
|
||||||
hg commit -m '#42: Add useful new feature that does this.'
|
git commit -m '#42: Add useful new feature that does this.'
|
||||||
|
|
||||||
BitBucket recognizes `certain phrases`__ that can be used to automatically
|
Github recognizes certain phrases that can be used to automatically
|
||||||
update the issue tracker.
|
update the issue tracker.
|
||||||
|
|
||||||
For example::
|
For example::
|
||||||
|
|
||||||
hg commit -m 'Closes #42: Fix invalid markup in docstring of Foo.bar.'
|
git commit -m 'Closes #42: Fix invalid markup in docstring of Foo.bar.'
|
||||||
|
|
||||||
would close issue #42.
|
would close issue #42.
|
||||||
|
|
||||||
__ https://confluence.atlassian.com/display/BITBUCKET/Resolve+issues+automatically+when+users+push+code
|
#. Push changes in the branch to your forked repository on Github. ::
|
||||||
|
|
||||||
#. Push changes to your forked repository on BitBucket. ::
|
git push origin feature-xyz
|
||||||
|
|
||||||
hg push
|
#. Submit a pull request from your branch to the respective branch (``master``
|
||||||
|
or ``stable``) on ``sphinx-doc/sphinx`` using the Github interface.
|
||||||
#. Submit a pull request from your repository to ``birkenfeld/sphinx`` using
|
|
||||||
the BitBucket interface.
|
|
||||||
|
|
||||||
#. Wait for a core developer to review your changes.
|
#. Wait for a core developer to review your changes.
|
||||||
|
|
||||||
@ -179,9 +182,6 @@ The following are some general guidelines for core developers:
|
|||||||
* When committing code written by someone else, please attribute the original
|
* When committing code written by someone else, please attribute the original
|
||||||
author in the commit message and any relevant :file:`CHANGES` entry.
|
author in the commit message and any relevant :file:`CHANGES` entry.
|
||||||
|
|
||||||
* Using Mercurial named branches other than ``default`` and ``stable`` is not
|
|
||||||
encouraged.
|
|
||||||
|
|
||||||
|
|
||||||
Locale updates
|
Locale updates
|
||||||
~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~
|
||||||
|
@ -310,12 +310,12 @@ are recognized and formatted nicely:
|
|||||||
|
|
||||||
* ``param``, ``parameter``, ``arg``, ``argument``, ``key``, ``keyword``:
|
* ``param``, ``parameter``, ``arg``, ``argument``, ``key``, ``keyword``:
|
||||||
Description of a parameter.
|
Description of a parameter.
|
||||||
* ``type``: Type of a parameter.
|
* ``type``: Type of a parameter. Creates a link if possible.
|
||||||
* ``raises``, ``raise``, ``except``, ``exception``: That (and when) a specific
|
* ``raises``, ``raise``, ``except``, ``exception``: That (and when) a specific
|
||||||
exception is raised.
|
exception is raised.
|
||||||
* ``var``, ``ivar``, ``cvar``: Description of a variable.
|
* ``var``, ``ivar``, ``cvar``: Description of a variable.
|
||||||
* ``returns``, ``return``: Description of the return value.
|
* ``returns``, ``return``: Description of the return value.
|
||||||
* ``rtype``: Return type.
|
* ``rtype``: Return type. Creates a link if possible.
|
||||||
|
|
||||||
The field names must consist of one of these keywords and an argument (except
|
The field names must consist of one of these keywords and an argument (except
|
||||||
for ``returns`` and ``rtype``, which do not need an argument). This is best
|
for ``returns`` and ``rtype``, which do not need an argument). This is best
|
||||||
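As a quick sketch, a docstring using several of these fields might look like
this (the function itself is hypothetical):

.. code-block:: python

   def send(message, retries=3):
       """Send *message* to the server.

       :param str message: the text to send
       :param int retries: how many times to retry on failure
       :returns: True if the message was acknowledged
       :rtype: bool
       :raises ConnectionError: if the server cannot be reached
       """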
|
@ -15,7 +15,7 @@ so by providing aliases to base URLs, so that you only need to give the subpage
|
|||||||
name when creating a link.
|
name when creating a link.
|
||||||
|
|
||||||
Let's assume that you want to include many links to issues at the Sphinx
|
Let's assume that you want to include many links to issues at the Sphinx
|
||||||
tracker, at :samp:`http://bitbucket.org/birkenfeld/sphinx/issue/{num}`. Typing
|
tracker, at :samp:`http://github.com/sphinx-doc/sphinx/issues/{num}`. Typing
|
||||||
this URL again and again is tedious, so you can use :mod:`~sphinx.ext.extlinks`
|
this URL again and again is tedious, so you can use :mod:`~sphinx.ext.extlinks`
|
||||||
to avoid repeating yourself.
|
to avoid repeating yourself.
|
||||||
|
|
||||||
@ -27,11 +27,11 @@ The extension adds one new config value:
|
|||||||
short alias names to a base URL and a *prefix*. For example, to create an
|
short alias names to a base URL and a *prefix*. For example, to create an
|
||||||
alias for the above mentioned issues, you would add ::
|
alias for the above mentioned issues, you would add ::
|
||||||
|
|
||||||
extlinks = {'issue': ('https://bitbucket.org/birkenfeld/sphinx/issue/%s',
|
extlinks = {'issue': ('https://github.com/sphinx-doc/sphinx/issues/%s',
|
||||||
'issue ')}
|
'issue ')}
|
||||||
|
|
||||||
Now, you can use the alias name as a new role, e.g. ``:issue:`123```. This
|
Now, you can use the alias name as a new role, e.g. ``:issue:`123```. This
|
||||||
then inserts a link to https://bitbucket.org/birkenfeld/sphinx/issue/123.
|
then inserts a link to https://github.com/sphinx-doc/sphinx/issues/123.
|
||||||
As you can see, the target given in the role is substituted in the base URL
|
As you can see, the target given in the role is substituted in the base URL
|
||||||
in the place of ``%s``.
|
in the place of ``%s``.
|
||||||
|
|
||||||
|
@ -84,7 +84,7 @@ linking:
|
|||||||
To add links to modules and objects in the Python standard library
|
To add links to modules and objects in the Python standard library
|
||||||
documentation, use::
|
documentation, use::
|
||||||
|
|
||||||
intersphinx_mapping = {'python': ('http://docs.python.org/3.2', None)}
|
intersphinx_mapping = {'python': ('http://docs.python.org/3.4', None)}
|
||||||
|
|
||||||
This will download the corresponding :file:`objects.inv` file from the
|
This will download the corresponding :file:`objects.inv` file from the
|
||||||
Internet and generate links to the pages under the given URI. The downloaded
|
Internet and generate links to the pages under the given URI. The downloaded
|
||||||
@ -94,12 +94,12 @@ linking:
|
|||||||
A second example, showing the meaning of a non-``None`` value of the second
|
A second example, showing the meaning of a non-``None`` value of the second
|
||||||
tuple item::
|
tuple item::
|
||||||
|
|
||||||
intersphinx_mapping = {'python': ('http://docs.python.org/3.2',
|
intersphinx_mapping = {'python': ('http://docs.python.org/3.4',
|
||||||
'python-inv.txt')}
|
'python-inv.txt')}
|
||||||
|
|
||||||
This will read the inventory from :file:`python-inv.txt` in the source
|
This will read the inventory from :file:`python-inv.txt` in the source
|
||||||
directory, but still generate links to the pages under
|
directory, but still generate links to the pages under
|
||||||
``http://docs.python.org/3.2``. It is up to you to update the inventory file
|
``http://docs.python.org/3.4``. It is up to you to update the inventory file
|
||||||
as new objects are added to the Python documentation.
|
as new objects are added to the Python documentation.
|
||||||
|
|
||||||
**Multiple target for the inventory**
|
**Multiple target for the inventory**
|
||||||
@ -113,7 +113,7 @@ linking:
|
|||||||
this to specify mirror sites for server downtime of the primary
|
this to specify mirror sites for server downtime of the primary
|
||||||
inventory::
|
inventory::
|
||||||
|
|
||||||
intersphinx_mapping = {'python': ('http://docs.python.org/3.2',
|
intersphinx_mapping = {'python': ('http://docs.python.org/3.4',
|
||||||
(None, 'python-inv.txt'))}
|
(None, 'python-inv.txt'))}
|
||||||
|
|
||||||
.. confval:: intersphinx_cache_limit
|
.. confval:: intersphinx_cache_limit
|
||||||
|
@ -288,6 +288,18 @@ package.
|
|||||||
|
|
||||||
.. versionadded:: 1.0
|
.. versionadded:: 1.0
|
||||||
|
|
||||||
|
.. method:: Sphinx.add_latex_package(packagename, options=None)
|
||||||
|
|
||||||
|
Add *packagename* to the list of packages that LaTeX source code will include.
|
||||||
|
If you provide *options*, they will be added to the ``\usepackage`` declaration.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
app.add_latex_package('mypackage') # => \usepackage{mypackage}
|
||||||
|
app.add_latex_package('mypackage', 'foo,bar') # => \usepackage[foo,bar]{mypackage}
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
|
|
||||||
.. method:: Sphinx.add_lexer(alias, lexer)
|
.. method:: Sphinx.add_lexer(alias, lexer)
|
||||||
|
|
||||||
Use *lexer*, which must be an instance of a Pygments lexer class, to
|
Use *lexer*, which must be an instance of a Pygments lexer class, to
|
||||||
@ -437,6 +449,19 @@ handlers to the events. Example:
|
|||||||
|
|
||||||
.. versionadded:: 0.5
|
.. versionadded:: 0.5
|
||||||
|
|
||||||
|
.. event:: env-before-read-docs (app, env, docnames)
|
||||||
|
|
||||||
|
Emitted after the environment has determined the list of all added and
|
||||||
|
changed files and just before it reads them. It allows extension authors to
|
||||||
|
reorder the list of docnames (*in place*) before processing, or add more
|
||||||
|
docnames that Sphinx did not consider changed (but never add any docnames
|
||||||
|
that are not in ``env.found_docs``).
|
||||||
|
|
||||||
|
You can also remove document names; do this with caution since it will make
|
||||||
|
Sphinx treat changed files as unchanged.
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
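A minimal handler sketch for this event (the ordering rule is arbitrary and
only demonstrates modifying the list in place):

.. code-block:: python

   def prioritize_index_docs(app, env, docnames):
       # read documents whose name contains 'index' before the others
       docnames.sort(key=lambda d: 0 if 'index' in d else 1)

   def setup(app):
       app.connect('env-before-read-docs', prioritize_index_docs)
       return {'version': '0.1'}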
|
|
||||||
.. event:: source-read (app, docname, source)
|
.. event:: source-read (app, docname, source)
|
||||||
|
|
||||||
Emitted when a source file has been read. The *source* argument is a list
|
Emitted when a source file has been read. The *source* argument is a list
|
||||||
@ -480,13 +505,40 @@ handlers to the events. Example:
|
|||||||
Here is the place to replace custom nodes that don't have visitor methods in
|
Here is the place to replace custom nodes that don't have visitor methods in
|
||||||
the writers, so that they don't cause errors when the writers encounter them.
|
the writers, so that they don't cause errors when the writers encounter them.
|
||||||
|
|
||||||
|
.. event:: env-merge-info (env, docnames, other)
|
||||||
|
|
||||||
|
This event is only emitted when parallel reading of documents is enabled. It
|
||||||
|
is emitted once for every subprocess that has read some documents.
|
||||||
|
|
||||||
|
You must handle this event in an extension that stores data in the
|
||||||
|
environment in a custom location. Otherwise the environment in the main
|
||||||
|
process will not be aware of the information stored in the subprocess.
|
||||||
|
|
||||||
|
*other* is the environment object from the subprocess, *env* is the
|
||||||
|
environment from the main process. *docnames* is a set of document names
|
||||||
|
that have been read in the subprocess.
|
||||||
|
|
||||||
|
For a sample of how to deal with this event, look at the standard
|
||||||
|
``sphinx.ext.todo`` extension. The implementation is often similar to that
|
||||||
|
of :event:`env-purge-doc`, only that information is not removed, but added to
|
||||||
|
the main environment from the other environment.
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
|
|
||||||
.. event:: env-updated (app, env)
|
.. event:: env-updated (app, env)
|
||||||
|
|
||||||
Emitted when the :meth:`update` method of the build environment has
|
Emitted when the :meth:`update` method of the build environment has
|
||||||
completed, that is, the environment and all doctrees are now up-to-date.
|
completed, that is, the environment and all doctrees are now up-to-date.
|
||||||
|
|
||||||
|
You can return an iterable of docnames from the handler. These documents
|
||||||
|
will then be considered updated, and will be (re-)written during the writing
|
||||||
|
phase.
|
||||||
|
|
||||||
.. versionadded:: 0.5
|
.. versionadded:: 0.5
|
||||||
|
|
||||||
|
.. versionchanged:: 1.3
|
||||||
|
The handlers' return value is now used.
|
||||||
|
|
||||||
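A sketch of a handler making use of the new return value (the document name
``'glossary'`` is just an example):

.. code-block:: python

   def always_rewrite_glossary(app, env):
       # force this page to be re-written on every build
       return ['glossary']

   def setup(app):
       app.connect('env-updated', always_rewrite_glossary)
       return {'version': '0.1'}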
.. event:: html-collect-pages (app)
|
.. event:: html-collect-pages (app)
|
||||||
|
|
||||||
Emitted when the HTML builder is starting to write non-document pages. You
|
Emitted when the HTML builder is starting to write non-document pages. You
|
||||||
@ -513,8 +565,14 @@ handlers to the events. Example:
|
|||||||
documents; it will be ``None`` when the page is created from an HTML template
|
documents; it will be ``None`` when the page is created from an HTML template
|
||||||
alone.
|
alone.
|
||||||
|
|
||||||
|
You can return a string from the handler, it will then replace
|
||||||
|
``'page.html'`` as the HTML template for this page.
|
||||||
|
|
||||||
.. versionadded:: 0.4
|
.. versionadded:: 0.4
|
||||||
|
|
||||||
|
.. versionchanged:: 1.3
|
||||||
|
The return value can now specify a template name.
|
||||||
|
|
||||||
.. event:: build-finished (app, exception)
|
.. event:: build-finished (app, exception)
|
||||||
|
|
||||||
Emitted when a build has finished, before Sphinx exits, usually used for
|
Emitted when a build has finished, before Sphinx exits, usually used for
|
||||||
|
@ -18,15 +18,32 @@ imports this module and executes its ``setup()`` function, which in turn
|
|||||||
notifies Sphinx of everything the extension offers -- see the extension tutorial
|
notifies Sphinx of everything the extension offers -- see the extension tutorial
|
||||||
for examples.
|
for examples.
|
||||||
|
|
||||||
.. versionadded:: 1.3
|
|
||||||
The ``setup()`` function can return a string, this is treated by Sphinx as
|
|
||||||
the version of the extension and used for informational purposes such as the
|
|
||||||
traceback file when an exception occurs.
|
|
||||||
|
|
||||||
The configuration file itself can be treated as an extension if it contains a
|
The configuration file itself can be treated as an extension if it contains a
|
||||||
``setup()`` function. All other extensions to load must be listed in the
|
``setup()`` function. All other extensions to load must be listed in the
|
||||||
:confval:`extensions` configuration value.
|
:confval:`extensions` configuration value.
|
||||||
|
|
||||||
|
Extension metadata
|
||||||
|
------------------
|
||||||
|
|
||||||
|
.. versionadded:: 1.3
|
||||||
|
|
||||||
|
The ``setup()`` function can return a dictionary. This is treated by Sphinx
|
||||||
|
as metadata of the extension. Metadata keys currently recognized are listed
below; a short ``setup()`` sketch follows the list:
|
||||||
|
|
||||||
|
* ``'version'``: a string that identifies the extension version. It is used for
|
||||||
|
extension version requirement checking (see :confval:`needs_extensions`) and
|
||||||
|
informational purposes. If not given, ``"unknown version"`` is substituted.
|
||||||
|
* ``'parallel_read_safe'``: a boolean that specifies if parallel reading of
|
||||||
|
source files can be used when the extension is loaded. It defaults to
|
||||||
|
``False``, i.e. you have to explicitly specify your extension to be
|
||||||
|
parallel-read-safe after checking that it is.
|
||||||
|
* ``'parallel_write_safe'``: a boolean that specifies if parallel writing of
|
||||||
|
output files can be used when the extension is loaded. Since extensions
|
||||||
|
usually don't negatively influence the process, this defaults to ``True``.
|
||||||
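A minimal ``setup()`` sketch returning this metadata (the version number and
flags are illustrative values):

.. code-block:: python

   def setup(app):
       # ... register directives, roles and event handlers here ...
       return {
           'version': '0.1',
           'parallel_read_safe': True,   # declare only after verifying it
           'parallel_write_safe': True,
       }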
|
|
||||||
|
APIs used for writing extensions
|
||||||
|
--------------------------------
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
|
|
||||||
tutorial
|
tutorial
|
||||||
|
@ -162,7 +162,7 @@ new Python module called :file:`todo.py` and add the setup function::
|
|||||||
app.connect('doctree-resolved', process_todo_nodes)
|
app.connect('doctree-resolved', process_todo_nodes)
|
||||||
app.connect('env-purge-doc', purge_todos)
|
app.connect('env-purge-doc', purge_todos)
|
||||||
|
|
||||||
return '0.1' # identifies the version of our extension
|
return {'version': '0.1'} # identifies the version of our extension
|
||||||
|
|
||||||
The calls in this function refer to classes and functions not yet written. What
|
The calls in this function refer to classes and functions not yet written. What
|
||||||
the individual calls do is the following:
|
the individual calls do is the following:
|
||||||
@ -247,6 +247,7 @@ todolist directive has neither content nor arguments that need to be handled.
|
|||||||
The ``todo`` directive function looks like this::
|
The ``todo`` directive function looks like this::
|
||||||
|
|
||||||
from sphinx.util.compat import make_admonition
|
from sphinx.util.compat import make_admonition
|
||||||
|
from sphinx.locale import _
|
||||||
|
|
||||||
class TodoDirective(Directive):
|
class TodoDirective(Directive):
|
||||||
|
|
||||||
|
@ -41,14 +41,14 @@ You can find several extensions contributed by users in the `Sphinx Contrib`_
|
|||||||
repository. It is open for anyone who wants to maintain an extension
|
repository. It is open for anyone who wants to maintain an extension
|
||||||
publicly; just send a short message asking for write permissions.
|
publicly; just send a short message asking for write permissions.
|
||||||
|
|
||||||
There are also several extensions hosted elsewhere. The `Wiki at BitBucket`_
|
There are also several extensions hosted elsewhere. The `Sphinx extension
|
||||||
maintains a list of those.
|
survey <http://sphinxext-survey.readthedocs.org/en/latest/>`__ contains a
|
||||||
|
comprehensive list.
|
||||||
|
|
||||||
If you write an extension that you think others will find useful or you think
|
If you write an extension that you think others will find useful or you think
|
||||||
should be included as a part of Sphinx, please write to the project mailing
|
should be included as a part of Sphinx, please write to the project mailing
|
||||||
list (`join here <https://groups.google.com/group/sphinx-dev>`_).
|
list (`join here <https://groups.google.com/group/sphinx-dev>`_).
|
||||||
|
|
||||||
.. _Wiki at BitBucket: https://bitbucket.org/birkenfeld/sphinx/wiki/Home
|
|
||||||
.. _Sphinx Contrib: https://bitbucket.org/birkenfeld/sphinx-contrib
|
.. _Sphinx Contrib: https://bitbucket.org/birkenfeld/sphinx-contrib
|
||||||
|
|
||||||
|
|
||||||
|
@@ -9,11 +9,11 @@ Since Sphinx is written in the Python language, you need to install Python
 Sphinx packages are available on the `Python Package Index
 <https://pypi.python.org/pypi/Sphinx>`_.
 
-You can also download a snapshot from the Mercurial development repository:
+You can also download a snapshot from the Git repository:
 
-* as a `.tar.bz2 <https://bitbucket.org/birkenfeld/sphinx/get/default.tar.bz2>`_
+* as a `.tar.bz2 <https://github.com/sphinx-doc/sphinx/archive/master.tar.bz2>`_
   file or
-* as a `.zip <https://bitbucket.org/birkenfeld/sphinx/get/default.zip>`_ file
+* as a `.zip <https://github.com/sphinx-doc/sphinx/archive/master.zip>`_ file
 
 There are introductions for several environments:
 
@@ -80,7 +80,7 @@ sidebar and under "Quick Links", click "Windows Installer" to download.
 .. note::
 
    Currently, Python offers two major versions, 2.x and 3.x. Sphinx 1.3 can run
-   under Python 2.6, 2.7, 3.2, 3.3, with the recommended version being
+   under Python 2.6, 2.7, 3.3, 3.4, with the recommended version being
    2.7. This chapter assumes you have installed Python 2.7.
 
 Follow the Windows installer for Python.
@@ -101,7 +101,7 @@ as ``sphinx-build`` easily from the Command Prompt.
 
 - ``C:\Python27`` -- this folder contains the main Python executable
 - ``C:\Python27\Scripts`` -- this folder will contain executables added by
-  Python packages installed with easy_install (see below)
+  Python packages installed with pip (see below)
 
 This is for Python 2.7. If you use another version of
 Python or installed to a non-default location, change the digits "27"
@@ -113,35 +113,39 @@ as ``sphinx-build`` easily from the Command Prompt.
 ``>>>``. Type ``Ctrl+Z`` and Enter to quit.
 
 
-Install the easy_install command
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Install the pip command
+^^^^^^^^^^^^^^^^^^^^^^^
 
-Python has a very useful :command:`easy_install` command which can download and
-install 3rd-party libraries with a single command. This is provided by the
-"setuptools" project: https://pypi.python.org/pypi/setuptools.
+Python has a very useful :command:`pip` command which can download and install
+3rd-party libraries with a single command. This is provided by the
+Python Packaging Authority(PyPA):
+https://groups.google.com/forum/#!forum/pypa-dev
 
-To install setuptools, download
-https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py and
+To install pip, download https://bootstrap.pypa.io/get-pip.py and
 save it somewhere. After download, invoke the command prompt, go to the
-directory with ez_setup.py and run this command:
+directory with ``get-pip.py`` and run this command:
 
 .. code-block:: bat
 
-   C:\> python ez_setup.py
+   C:\> python get-pip.py
 
-Now setuptools and its :command:`easy_install` command is installed. From there
-we can go to the Sphinx install.
+Now :command:`pip` command is installed. From there we can go to the Sphinx
+install.
 
+.. note::
+
+   ``pip`` has been contained in the Python official installation after version
+   of Python-3.4.0 or Python-2.7.9.
+
 
-Installing Sphinx with easy_install
------------------------------------
+Installing Sphinx with pip
+--------------------------
 
-If you finished the installation of setuptools, type this line in the command
-prompt:
+If you finished the installation of pip, type this line in the command prompt:
 
 .. code-block:: bat
 
-   C:\> easy_install sphinx
+   C:\> pip install sphinx
 
 After installation, type :command:`sphinx-build` on the command prompt. If
 everything worked fine, you will get a Sphinx version number and a list of
@@ -277,7 +277,7 @@ Contributing to Sphinx reference translation
 The recommended way for new contributors to translate Sphinx reference
 is to join the translation team on Transifex.
 
-There is `sphinx translation page`_ for Sphinx-1.2 documentation.
+There is `sphinx translation page`_ for Sphinx-1.3 documentation.
 
 1. Login to transifex_ service.
 2. Go to `sphinx translation page`_.
@@ -297,5 +297,5 @@ There is `sphinx translation page`_ for Sphinx-1.2 documentation.
 .. _`transifex-client`: https://pypi.python.org/pypi/transifex-client
 .. _`sphinx-intl`: https://pypi.python.org/pypi/sphinx-intl
 .. _Transifex: https://www.transifex.com/
-.. _`sphinx translation page`: https://www.transifex.com/projects/p/sphinx-doc-1_2_0/
+.. _`sphinx translation page`: https://www.transifex.com/projects/p/sphinx-doc-1_3/
 .. _`Transifex Client v0.8 — Transifex documentation`: http://help.transifex.com/features/client/index.html
 
@@ -54,7 +54,7 @@ See the :ref:`pertinent section in the FAQ list <usingwith>`.
 Prerequisites
 -------------
 
-Sphinx needs at least **Python 2.6** or **Python 3.2** to run, as well as the
+Sphinx needs at least **Python 2.6** or **Python 3.3** to run, as well as the
 docutils_ and Jinja2_ libraries. Sphinx should work with docutils version 0.10
 or some (not broken) SVN trunk snapshot. If you like to have source code
 highlighting support, you must also install the Pygments_ library.
@@ -1,5 +1,138 @@
+.. default-role:: any
+
 .. _invocation:
 
+Invocation of sphinx-quickstart
+===============================
+
+The :program:`sphinx-quickstart` script generates a Sphinx documentation set.
+It is called like this::
+
+   $ sphinx-quickstart [options] [projectdir]
+
+where *projectdir* is the Sphinx documentation set directory in which you want
+to place. If you omit *projectdir*, files are generated into current directory
+by default.
+
+The :program:`sphinx-quickstart` script has several options:
+
+.. program:: sphinx-quickstart
+
+.. option:: -q, --quiet
+
+   Quiet mode that will skips interactive wizard to specify options.
+   This option requires `-p`, `-a` and `-v` options.
+
+.. option:: -h, --help, --version
+
+   Display usage summary or Sphinx version.
+
+
+Structure options
+-----------------
+
+.. option:: --sep
+
+   If specified, separate source and build directories.
+
+.. option:: --dot=DOT
+
+   Inside the root directory, two more directories will be created;
+   "_templates" for custom HTML templates and "_static" for custom stylesheets
+   and other static files. You can enter another prefix (such as ".") to
+   replace the underscore.
+
+Project basic options
+---------------------
+
+.. option:: -p PROJECT, --project=PROJECT
+
+   Project name will be set. (see :confval:`project`).
+
+.. option:: -a AUTHOR, --author=AUTHOR
+
+   Author names. (see :confval:`copyright`).
+
+.. option:: -v VERSION
+
+   Version of project. (see :confval:`version`).
+
+.. option:: -r RELEASE, --release=RELEASE
+
+   Release of project. (see :confval:`release`).
+
+.. option:: -l LANGUAGE, --language=LANGUAGE
+
+   Document language. (see :confval:`language`).
+
+.. option:: --suffix=SUFFIX
+
+   Source file suffix. (see :confval:`source_suffix`).
+
+.. option:: --master=MASTER
+
+   Master document name. (see :confval:`master_doc`).
+
+.. option:: --epub
+
+   Use epub.
+
+Extension options
+-----------------
+
+.. option:: --ext-autodoc
+
+   Enable `sphinx.ext.autodoc` extension.
+
+.. option:: --ext-doctest
+
+   Enable `sphinx.ext.doctest` extension.
+
+.. option:: --ext-intersphinx
+
+   Enable `sphinx.ext.intersphinx` extension.
+
+.. option:: --ext-todo
+
+   Enable `sphinx.ext.todo` extension.
+
+.. option:: --ext-coverage
+
+   Enable `sphinx.ext.coverage` extension.
+
+.. option:: --ext-pngmath
+
+   Enable `sphinx.ext.pngmath` extension.
+
+.. option:: --ext-mathjax
+
+   Enable `sphinx.ext.mathjax` extension.
+
+.. option:: --ext-ifconfig
+
+   Enable `sphinx.ext.ifconfig` extension.
+
+.. option:: --ext-viewcode
+
+   Enable `sphinx.ext.viewcode` extension.
+
+
+Makefile and Batchfile creation options
+---------------------------------------
+
+.. option:: --makefile, --no-makefile
+
+   Create (or not create) makefile.
+
+.. option:: --batchfile, --no-batchfile
+
+   Create (or not create) batchfile
+
+
+.. versionadded:: 1.3
+   Add various options for sphinx-quickstart invocation.
+
+
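Because quiet mode skips the wizard, it needs the project name, author and
version supplied up front; a scripted run might look like the following
sketch, in which all metadata and the target directory are illustrative::

   import subprocess

   # -q (quiet) requires -p, -a and -v, as noted above.
   subprocess.check_call([
       "sphinx-quickstart", "-q",
       "-p", "MyProject",
       "-a", "Jane Doe",
       "-v", "0.1",
       "--sep", "--makefile",
       "docs",                 # projectdir, created non-interactively
   ])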
 Invocation of sphinx-build
 ==========================
 
@@ -305,6 +438,11 @@ The :program:`sphinx-apidoc` script has several options:
 to default values, but you can influence the most important ones using the
 following options.
 
+.. option:: -M
+
+   This option makes sphinx-apidoc put module documentation before submodule
+   documentation.
+
 .. option:: -H project
 
    Sets the project name to put in generated files (see :confval:`project`).
@@ -36,21 +36,29 @@ installed) and handled in a smart way:
   highlighted as Python).
 
 * The highlighting language can be changed using the ``highlight`` directive,
-  used as follows::
+  used as follows:
 
-     .. highlight:: c
+  .. rst:directive:: .. highlight:: language
 
-  This language is used until the next ``highlight`` directive is encountered.
+     Example::
+
+        .. highlight:: c
+
+     This language is used until the next ``highlight`` directive is encountered.
 
 * For documents that have to show snippets in different languages, there's also
   a :rst:dir:`code-block` directive that is given the highlighting language
-  directly::
+  directly:
 
-     .. code-block:: ruby
+  .. rst:directive:: .. code-block:: language
 
-        Some Ruby code.
+     Use it like this::
 
-  The directive's alias name :rst:dir:`sourcecode` works as well.
+        .. code-block:: ruby
+
+           Some Ruby code.
+
+     The directive's alias name :rst:dir:`sourcecode` works as well.
 
 * The valid values for the highlighting language are:
 
@@ -176,6 +184,10 @@ Includes
    string option, only lines that precede the first lines containing that string
    are included.
 
+   When specifying particular parts of a file to display, it can be useful to
+   display exactly which lines are being presented.
+   This can be done using the ``lineno-match`` option.
+
    You can prepend and/or append a line to the included code, using the
    ``prepend`` and ``append`` option, respectively. This is useful e.g. for
    highlighting PHP code that doesn't include the ``<?php``/``?>`` markers.
@@ -187,8 +199,8 @@ Includes
       .. literalinclude:: example.py
          :diff: example.py.orig
 
-   This shows the diff between example.py and example.py.orig with unified diff format.
+   This shows the diff between example.py and example.py.orig with unified diff
+   format.
 
    .. versionadded:: 0.4.3
       The ``encoding`` option.
@@ -199,6 +211,7 @@ Includes
       The ``prepend`` and ``append`` options, as well as ``tab-width``.
    .. versionadded:: 1.3
       The ``diff`` option.
+      The ``lineno-match`` option.
 
 
 Showing a file name
@@ -12,7 +12,9 @@ They are written as ``:rolename:`content```.
 
 The default role (```content```) has no special meaning by default. You are
 free to use it for anything you like, e.g. variable names; use the
-:confval:`default_role` config value to set it to a known role.
+:confval:`default_role` config value to set it to a known role -- the
+:rst:role:`any` role to find anything or the :rst:role:`py:obj` role to find
+Python objects are very useful for this.
 
 See :ref:`domains` for roles added by domains.
 
@@ -38,12 +40,57 @@ more versatile:
 
 * If you prefix the content with ``~``, the link text will only be the last
   component of the target. For example, ``:py:meth:`~Queue.Queue.get``` will
-  refer to ``Queue.Queue.get`` but only display ``get`` as the link text.
+  refer to ``Queue.Queue.get`` but only display ``get`` as the link text. This
+  does not work with all cross-reference roles, but is domain specific.
 
   In HTML output, the link's ``title`` attribute (that is e.g. shown as a
   tool-tip on mouse-hover) will always be the full target name.
 
+
+.. _any-role:
+
+Cross-referencing anything
+--------------------------
+
+.. rst:role:: any
+
+   .. versionadded:: 1.3
+
+   This convenience role tries to do its best to find a valid target for its
+   reference text.
+
+   * First, it tries standard cross-reference targets that would be referenced
+     by :rst:role:`doc`, :rst:role:`ref` or :rst:role:`option`.
+
+     Custom objects added to the standard domain by extensions (see
+     :meth:`.add_object_type`) are also searched.
+
+   * Then, it looks for objects (targets) in all loaded domains. It is up to
+     the domains how specific a match must be. For example, in the Python
+     domain a reference of ``:any:`Builder``` would match the
+     ``sphinx.builders.Builder`` class.
+
+   If none or multiple targets are found, a warning will be emitted. In the
+   case of multiple targets, you can change "any" to a specific role.
+
+   This role is a good candidate for setting :confval:`default_role`. If you
+   do, you can write cross-references without a lot of markup overhead. For
+   example, in this Python function documentation ::
+
+      .. function:: install()
+
+         This function installs a `handler` for every signal known by the
+         `signal` module. See the section `about-signals` for more information.
+
+   there could be references to a glossary term (usually ``:term:`handler```), a
+   Python module (usually ``:py:mod:`signal``` or ``:mod:`signal```) and a
+   section (usually ``:ref:`about-signals```).
+
+   The :rst:role:`any` role also works together with the
+   :mod:`~sphinx.ext.intersphinx` extension: when no local cross-reference is
+   found, all object types of intersphinx inventories are also searched.
+
+
 Cross-referencing objects
 -------------------------
 
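A common way to put the ``any`` role described above to work is to make it the
default role in ``conf.py``; the following fragment is a sketch of that setup,
with intersphinx enabled only to illustrate the cross-project lookup::

   # conf.py -- sketch: let bare `backtick` references resolve via "any".
   extensions = [
       'sphinx.ext.intersphinx',   # also search intersphinx inventories
   ]
   default_role = 'any'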
@@ -154,6 +201,24 @@ Referencing downloadable files
    suitable link generated to it.
 
 
+Cross-referencing figures by figure number
+------------------------------------------
+
+.. versionadded:: 1.3
+
+.. rst:role:: numref
+
+   Link to the specified figures, tables and code-blocks; the standard reST
+   labels are used. When you use this role, it will insert a reference to the
+   figure with link text by its figure number like "Fig. 1.1".
+
+   If an explicit link text is given (like usual: ``:doc:`Image of Sphinx (Fig.
+   #) <my-figure>```), the link caption will be the title of the reference.
+   As a special character, `#` will be replaced to figure number.
+
+   If :confval:`numfig` is ``False``, figures are not numbered.
+   so this role inserts not a reference but labels or link text.
+
 Cross-referencing other items of interest
 -----------------------------------------
 
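Numbered references therefore depend on the :confval:`numfig` setting; a
minimal ``conf.py`` sketch that switches figure numbering on looks like this::

   # conf.py -- enable numbering so that :numref:`my-figure-label`
   # renders with "Fig. 1.1"-style link text.
   numfig = True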
@@ -182,11 +247,12 @@ objects:
    exists.
 
 
-The following role creates a cross-reference to the term in the glossary:
+The following role creates a cross-reference to a term in a
+:ref:`glossary <glossary-directive>`:
 
 .. rst:role:: term
 
-   Reference to a term in the glossary. The glossary is created using the
+   Reference to a term in a glossary. A glossary is created using the
    ``glossary`` directive containing a definition list with terms and
    definitions. It does not have to be in the same file as the ``term`` markup,
    for example the Python docs have one global glossary in the ``glossary.rst``
@@ -148,6 +148,8 @@ For local tables of contents, use the standard reST :dudir:`contents directive
 <table-of-contents>`.
 
 
+.. _glossary-directive:
+
 Glossary
 --------
 
@@ -10,6 +10,15 @@ The green arrows designate "more info" links leading to advanced sections about
 the described task.
 
 
+Install Sphinx
+--------------
+
+Install Sphinx, either from a distribution package or from
+`PyPI <https://pypi.python.org/pypi/Sphinx>`_ with ::
+
+   $ pip install Sphinx
+
+
 Setting up the documentation sources
 ------------------------------------
 
@@ -57,7 +57,7 @@ This will return a dictionary containing the following items:
 * **relbar**: A div containing links to related documents
 * **title**: The title of the document
 * **css**: Links to CSS files used by Sphinx
-* **js**: JavaScript containing comment options
+* **script**: JavaScript containing comment options
 
 This dict can then be used as context for templates. The goal is to be easy to
 integrate with your existing templating system. An example using `Jinja2
@@ -77,9 +77,9 @@ integrate with your existing templating system. An example using `Jinja2
     <link rel="stylesheet" href="/static/websupport-custom.css" type="text/css">
   {% endblock %}
 
-  {%- block js %}
+  {%- block script %}
     {{ super() }}
-    {{ document.js|safe }}
+    {{ document.script|safe }}
   {%- endblock %}
 
   {%- block relbar %}
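On the application side, the renamed key shows up in the dictionary returned
for each document; the sketch below assumes a web support instance pointed at
an already-built, illustrative data directory::

   from sphinx.websupport import WebSupport

   support = WebSupport(datadir='/path/to/data')   # illustrative path
   document = support.get_document('contents')

   # 'script' (formerly 'js') carries the comment JavaScript for the page,
   # next to the 'title', 'css' and 'relbar' items listed above.
   print(document['title'])
   print(len(document['script']))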
ez_setup.py
@@ -1,14 +1,14 @@
-#!python
+#!/usr/bin/env python
 """Bootstrap setuptools installation
 
-If you want to use setuptools in your package's setup.py, just include this
-file in the same directory with it, and add this to the top of your setup.py::
+To use setuptools in your package's setup.py, include this
+file in the same directory and add this to the top of your setup.py::
 
     from ez_setup import use_setuptools
     use_setuptools()
 
-If you want to require a specific version of setuptools, set a download
-mirror, or use an alternate download directory, you can do so by supplying
+To require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, simply supply
 the appropriate options to ``use_setuptools()``.
 
 This file can also be run as a script to install or upgrade setuptools.
@@ -17,50 +17,38 @@ import os
 import shutil
 import sys
 import tempfile
-import tarfile
+import zipfile
 import optparse
 import subprocess
 import platform
+import textwrap
+import contextlib
 
 from distutils import log
 
+try:
+    from urllib.request import urlopen
+except ImportError:
+    from urllib2 import urlopen
+
 try:
     from site import USER_SITE
 except ImportError:
     USER_SITE = None
 
-DEFAULT_VERSION = "1.1.6"
+DEFAULT_VERSION = "7.0"
 DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
 
 def _python_cmd(*args):
+    """
+    Return True if the command succeeded.
+    """
     args = (sys.executable,) + args
     return subprocess.call(args) == 0
 
-def _check_call_py24(cmd, *args, **kwargs):
-    res = subprocess.call(cmd, *args, **kwargs)
-    class CalledProcessError(Exception):
-        pass
-    if not res == 0:
-        msg = "Command '%s' return non-zero exit status %d" % (cmd, res)
-        raise CalledProcessError(msg)
-vars(subprocess).setdefault('check_call', _check_call_py24)
-
-def _install(tarball, install_args=()):
-    # extracting the tarball
-    tmpdir = tempfile.mkdtemp()
-    log.warn('Extracting in %s', tmpdir)
-    old_wd = os.getcwd()
-    try:
-        os.chdir(tmpdir)
-        tar = tarfile.open(tarball)
-        _extractall(tar)
-        tar.close()
-
-        # going in the directory
-        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
-        os.chdir(subdir)
-        log.warn('Now working in %s', subdir)
-
+def _install(archive_filename, install_args=()):
+    with archive_context(archive_filename):
         # installing
         log.warn('Installing Setuptools')
         if not _python_cmd('setup.py', 'install', *install_args):
@@ -68,47 +56,68 @@ def _install(tarball, install_args=()):
             log.warn('See the error message above.')
             # exitcode will be 2
             return 2
-    finally:
-        os.chdir(old_wd)
-        shutil.rmtree(tmpdir)
 
 
-def _build_egg(egg, tarball, to_dir):
-    # extracting the tarball
-    tmpdir = tempfile.mkdtemp()
-    log.warn('Extracting in %s', tmpdir)
-    old_wd = os.getcwd()
-    try:
-        os.chdir(tmpdir)
-        tar = tarfile.open(tarball)
-        _extractall(tar)
-        tar.close()
-
-        # going in the directory
-        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
-        os.chdir(subdir)
-        log.warn('Now working in %s', subdir)
-
+def _build_egg(egg, archive_filename, to_dir):
+    with archive_context(archive_filename):
         # building an egg
         log.warn('Building a Setuptools egg in %s', to_dir)
         _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
 
-    finally:
-        os.chdir(old_wd)
-        shutil.rmtree(tmpdir)
     # returning the result
     log.warn(egg)
     if not os.path.exists(egg):
         raise IOError('Could not build the egg.')
 
 
+class ContextualZipFile(zipfile.ZipFile):
+    """
+    Supplement ZipFile class to support context manager for Python 2.6
+    """
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        self.close()
+
+    def __new__(cls, *args, **kwargs):
+        """
+        Construct a ZipFile or ContextualZipFile as appropriate
+        """
+        if hasattr(zipfile.ZipFile, '__exit__'):
+            return zipfile.ZipFile(*args, **kwargs)
+        return super(ContextualZipFile, cls).__new__(cls)
+
+
+@contextlib.contextmanager
+def archive_context(filename):
+    # extracting the archive
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        with ContextualZipFile(filename) as archive:
+            archive.extractall()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+        yield
+
+    finally:
+        os.chdir(old_wd)
+        shutil.rmtree(tmpdir)
+
+
 def _do_download(version, download_base, to_dir, download_delay):
     egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
                        % (version, sys.version_info[0], sys.version_info[1]))
     if not os.path.exists(egg):
-        tarball = download_setuptools(version, download_base,
+        archive = download_setuptools(version, download_base,
                                       to_dir, download_delay)
-        _build_egg(egg, tarball, to_dir)
+        _build_egg(egg, archive, to_dir)
     sys.path.insert(0, egg)
 
     # Remove previously-imported pkg_resources if present (see
@@ -121,11 +130,10 @@ def _do_download(version, download_base, to_dir, download_delay):
 
 
 def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                    to_dir=os.curdir, download_delay=15):
-    # making sure we use the absolute path
     to_dir = os.path.abspath(to_dir)
-    was_imported = 'pkg_resources' in sys.modules or \
-        'setuptools' in sys.modules
+    rep_modules = 'pkg_resources', 'setuptools'
+    imported = set(sys.modules).intersection(rep_modules)
     try:
         import pkg_resources
     except ImportError:
@@ -133,23 +141,36 @@ def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
     try:
         pkg_resources.require("setuptools>=" + version)
         return
-    except pkg_resources.VersionConflict:
-        e = sys.exc_info()[1]
-        if was_imported:
-            sys.stderr.write(
-                "The required version of setuptools (>=%s) is not available,\n"
-                "and can't be installed while this script is running. Please\n"
-                "install a more recent version first, using\n"
-                "'easy_install -U setuptools'."
-                "\n\n(Currently using %r)\n" % (version, e.args[0]))
-            sys.exit(2)
-        else:
-            del pkg_resources, sys.modules['pkg_resources']  # reload ok
-            return _do_download(version, download_base, to_dir,
-                                download_delay)
     except pkg_resources.DistributionNotFound:
-        return _do_download(version, download_base, to_dir,
-                            download_delay)
+        return _do_download(version, download_base, to_dir, download_delay)
+    except pkg_resources.VersionConflict as VC_err:
+        if imported:
+            msg = textwrap.dedent("""
+                The required version of setuptools (>={version}) is not available,
+                and can't be installed while this script is running. Please
+                install a more recent version first, using
+                'easy_install -U setuptools'.
+
+                (Currently using {VC_err.args[0]!r})
+                """).format(VC_err=VC_err, version=version)
+            sys.stderr.write(msg)
+            sys.exit(2)
+
+        # otherwise, reload ok
+        del pkg_resources, sys.modules['pkg_resources']
+        return _do_download(version, download_base, to_dir, download_delay)
+
+
+def _clean_check(cmd, target):
+    """
+    Run the command to download target. If the command fails, clean up before
+    re-raising the error.
+    """
+    try:
+        subprocess.check_call(cmd)
+    except subprocess.CalledProcessError:
+        if os.access(target, os.F_OK):
+            os.unlink(target)
+        raise
 
 def download_file_powershell(url, target):
     """
@@ -157,61 +178,58 @@ def download_file_powershell(url, target):
     trust). Raise an exception if the command cannot complete.
     """
     target = os.path.abspath(target)
+    ps_cmd = (
+        "[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
+        "[System.Net.CredentialCache]::DefaultCredentials; "
+        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
+        % vars()
+    )
     cmd = [
         'powershell',
         '-Command',
-        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(),
+        ps_cmd,
     ]
-    subprocess.check_call(cmd)
+    _clean_check(cmd, target)
 
 def has_powershell():
     if platform.system() != 'Windows':
         return False
     cmd = ['powershell', '-Command', 'echo test']
-    devnull = open(os.path.devnull, 'wb')
-    try:
+    with open(os.path.devnull, 'wb') as devnull:
         try:
             subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
-        except:
+        except Exception:
             return False
-    finally:
-        devnull.close()
     return True
 
 download_file_powershell.viable = has_powershell
 
 def download_file_curl(url, target):
     cmd = ['curl', url, '--silent', '--output', target]
-    subprocess.check_call(cmd)
+    _clean_check(cmd, target)
 
 def has_curl():
     cmd = ['curl', '--version']
-    devnull = open(os.path.devnull, 'wb')
-    try:
+    with open(os.path.devnull, 'wb') as devnull:
         try:
             subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
-        except:
+        except Exception:
             return False
-    finally:
-        devnull.close()
     return True
 
 download_file_curl.viable = has_curl
 
 def download_file_wget(url, target):
     cmd = ['wget', url, '--quiet', '--output-document', target]
-    subprocess.check_call(cmd)
+    _clean_check(cmd, target)
 
 def has_wget():
     cmd = ['wget', '--version']
-    devnull = open(os.path.devnull, 'wb')
-    try:
+    with open(os.path.devnull, 'wb') as devnull:
         try:
             subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
-        except:
+        except Exception:
            return False
-    finally:
-        devnull.close()
     return True
 
 download_file_wget.viable = has_wget
@@ -221,45 +239,36 @@ def download_file_insecure(url, target):
     Use Python to download the file, even though it cannot authenticate the
     connection.
     """
+    src = urlopen(url)
     try:
-        from urllib.request import urlopen
-    except ImportError:
-        from urllib2 import urlopen
-    src = dst = None
-    try:
-        src = urlopen(url)
-        # Read/write all in one block, so we don't create a corrupt file
-        # if the download is interrupted.
+        # Read all the data in one block.
         data = src.read()
-        dst = open(target, "wb")
-        dst.write(data)
     finally:
-        if src:
-            src.close()
-        if dst:
-            dst.close()
+        src.close()
+
+    # Write all the data in one block to avoid creating a partial file.
+    with open(target, "wb") as dst:
+        dst.write(data)
 
 download_file_insecure.viable = lambda: True
 
 def get_best_downloader():
-    downloaders = [
+    downloaders = (
         download_file_powershell,
         download_file_curl,
         download_file_wget,
         download_file_insecure,
-    ]
-    for dl in downloaders:
-        if dl.viable():
-            return dl
+    )
+    viable_downloaders = (dl for dl in downloaders if dl.viable())
+    return next(viable_downloaders, None)
 
 def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
@@ -269,74 +278,20 @@ def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-        to_dir=os.curdir, delay=15,
-        downloader_factory=get_best_downloader):
-    """Download setuptools from a specified location and return its filename
+        to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
+    """
+    Download setuptools from a specified location and return its filename
 
     `version` should be a valid setuptools version number that is available
-    as an egg for download under the `download_base` URL (which should end
+    as an sdist for download under the `download_base` URL (which should end
     with a '/'). `to_dir` is the directory where the egg will be downloaded.
     `delay` is the number of seconds to pause before an actual download
     attempt.
     """
     # making sure we use the absolute path
     to_dir = os.path.abspath(to_dir)
-    tgz_name = "setuptools-%s.tar.gz" % version
-    url = download_base + tgz_name
-    saveto = os.path.join(to_dir, tgz_name)
+    zip_name = "setuptools-%s.zip" % version
+    url = download_base + zip_name
+    saveto = os.path.join(to_dir, zip_name)
     if not os.path.exists(saveto): # Avoid repeated downloads
         log.warn("Downloading %s", url)
         downloader = downloader_factory()
         downloader(url, saveto)
     return os.path.realpath(saveto)
 
 
-def _extractall(self, path=".", members=None):
-    """Extract all members from the archive to the current working
-    directory and set owner, modification time and permissions on
-    directories afterwards. `path' specifies a different directory
-    to extract to. `members' is optional and must be a subset of the
-    list returned by getmembers().
-    """
-    import copy
-    import operator
-    from tarfile import ExtractError
-    directories = []
-
-    if members is None:
-        members = self
-
-    for tarinfo in members:
-        if tarinfo.isdir():
-            # Extract directories with a safe mode.
-            directories.append(tarinfo)
-            tarinfo = copy.copy(tarinfo)
-            tarinfo.mode = 448 # decimal for oct 0700
-        self.extract(tarinfo, path)
-
-    # Reverse sort directories.
-    if sys.version_info < (2, 4):
-        def sorter(dir1, dir2):
-            return cmp(dir1.name, dir2.name)
-        directories.sort(sorter)
-        directories.reverse()
-    else:
-        directories.sort(key=operator.attrgetter('name'), reverse=True)
-
-    # Set correct owner, mtime and filemode on directories.
-    for tarinfo in directories:
-        dirpath = os.path.join(path, tarinfo.name)
-        try:
-            self.chown(tarinfo, dirpath)
-            self.utime(tarinfo, dirpath)
-            self.chmod(tarinfo, dirpath)
-        except ExtractError:
-            e = sys.exc_info()[1]
-            if self.errorlevel > 1:
-                raise
-            else:
-                self._dbg(1, "tarfile: %s" % e)
-
-
 def _build_install_args(options):
     """
     Build the arguments to 'python setup.py install' on the setuptools package
     """
-    install_args = []
-    if options.user_install:
-        if sys.version_info < (2, 6):
-            log.warn("--user requires Python 2.6 or later")
-            raise SystemExit(1)
-        install_args.append('--user')
-    return install_args
+    return ['--user'] if options.user_install else []
 
 def _parse_args():
     """
@@ -355,16 +310,23 @@ def _parse_args():
         const=lambda: download_file_insecure, default=get_best_downloader,
         help='Use internal, non-validating downloader'
     )
+    parser.add_option(
+        '--version', help="Specify which version to download",
+        default=DEFAULT_VERSION,
+    )
     options, args = parser.parse_args()
     # positional arguments are ignored
     return options
 
-def main(version=DEFAULT_VERSION):
+def main():
     """Install or upgrade setuptools and EasyInstall"""
     options = _parse_args()
-    tarball = download_setuptools(download_base=options.download_base,
-                                  downloader_factory=options.downloader_factory)
-    return _install(tarball, _build_install_args(options))
+    archive = download_setuptools(
+        version=options.version,
+        download_base=options.download_base,
+        downloader_factory=options.downloader_factory,
+    )
+    return _install(archive, _build_install_args(options))
 
 if __name__ == '__main__':
     sys.exit(main())
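As the bootstrap docstring above says, a project uses this script by importing
it at the top of its own ``setup.py``; a minimal sketch with illustrative
package metadata::

   # setup.py -- minimal sketch of the bootstrap usage described above.
   from ez_setup import use_setuptools
   use_setuptools()  # installs setuptools if it is missing or too old

   from setuptools import setup

   setup(
       name='example-project',   # illustrative metadata, not a real package
       version='0.1',
   )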
@@ -19,3 +19,6 @@ output_dir = sphinx/locale/
 [compile_catalog]
 domain = sphinx
 directory = sphinx/locale/
+
+[wheel]
+universal = 1

setup.py
@@ -48,12 +48,19 @@ if sys.version_info < (2, 6) or (3, 0) <= sys.version_info < (3, 3):
 requires = [
     'six>=1.4',
     'Jinja2>=2.3',
-    'Pygments>=1.2',
-    'docutils>=0.10',
+    'Pygments>=2.0',
+    'docutils>=0.11',
     'snowballstemmer>=1.1',
     'babel',
 ]
+extras_require = {
+    # Environment Marker works for wheel 0.24 or later
+    ':sys_platform=="win32"': [
+        'colorama',
+    ],
+}
 
+# for sdist installation with pip-1.5.6
 if sys.platform == 'win32':
     requires.append('colorama')
 
@@ -174,12 +181,16 @@ setup(
         'Programming Language :: Python',
         'Programming Language :: Python :: 2',
         'Programming Language :: Python :: 3',
+        'Framework :: Sphinx',
+        'Framework :: Sphinx :: Extension',
+        'Framework :: Sphinx :: Theme',
         'Topic :: Documentation',
+        'Topic :: Documentation :: Sphinx',
         'Topic :: Text Processing',
         'Topic :: Utilities',
     ],
     platforms='any',
-    packages=find_packages(exclude=['test']),
+    packages=find_packages(exclude=['tests']),
     include_package_data=True,
     entry_points={
         'console_scripts': [
@@ -193,5 +204,6 @@ setup(
         ],
     },
     install_requires=requires,
+    extras_require=extras_require,
     cmdclass=cmdclass,
 )

@@ -4,7 +4,7 @@
     Sphinx - Python documentation toolchain
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -4,7 +4,7 @@
     Sphinx - Python documentation toolchain
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -4,7 +4,7 @@
     Sphinx - Python documentation toolchain
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -4,7 +4,7 @@
     Sphinx - Python documentation toolchain
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -5,7 +5,7 @@
 
     The Sphinx documentation toolchain.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -15,12 +15,12 @@
 import sys
 from os import path
 
-__version__ = '1.3a0'
-__released__ = '1.3a0'  # used when Sphinx builds its own docs
+__version__ = '1.3b3'
+__released__ = '1.3b3'  # used when Sphinx builds its own docs
 # version info for better programmatic use
 # possible values for 3rd element: 'alpha', 'beta', 'rc', 'final'
 # 'final' has 0 as the last element
-version_info = (1, 3, 0, 'alpha', 0)
+version_info = (1, 3, 0, 'beta', 3)
 
 package_dir = path.abspath(path.dirname(__file__))
 
@@ -5,7 +5,7 @@
 
     The Sphinx documentation toolchain.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 import sys

@@ -5,7 +5,7 @@
 
     Additional docutils nodes.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -25,6 +25,7 @@ class desc(nodes.Admonition, nodes.Element):
     contains one or more ``desc_signature`` and a ``desc_content``.
     """
 
+
 class desc_signature(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for object signatures.
 
@@ -39,33 +40,42 @@ class desc_addname(nodes.Part, nodes.Inline, nodes.TextElement):
 # compatibility alias
 desc_classname = desc_addname
 
+
 class desc_type(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for return types or object type names."""
 
+
 class desc_returns(desc_type):
     """Node for a "returns" annotation (a la -> in Python)."""
     def astext(self):
         return ' -> ' + nodes.TextElement.astext(self)
 
+
 class desc_name(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for the main object name."""
 
+
 class desc_parameterlist(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for a general parameter list."""
     child_text_separator = ', '
 
+
 class desc_parameter(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for a single parameter."""
 
+
 class desc_optional(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for marking optional parts of the parameter list."""
     child_text_separator = ', '
 
     def astext(self):
         return '[' + nodes.TextElement.astext(self) + ']'
 
+
 class desc_annotation(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for signature annotations (not Python 3-style annotations)."""
 
+
 class desc_content(nodes.General, nodes.Element):
     """Node for object description content.
 
@@ -82,15 +92,18 @@ class versionmodified(nodes.Admonition, nodes.TextElement):
     directives.
     """
 
+
 class seealso(nodes.Admonition, nodes.Element):
     """Custom "see also" admonition."""
 
+
 class productionlist(nodes.Admonition, nodes.Element):
     """Node for grammar production lists.
 
     Contains ``production`` nodes.
     """
 
+
 class production(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for a single grammar production rule."""
 
@@ -107,26 +120,33 @@ class index(nodes.Invisible, nodes.Inline, nodes.TextElement):
     *entrytype* is one of "single", "pair", "double", "triple".
     """
 
-class centered(nodes.Part, nodes.Element):
+
+class centered(nodes.Part, nodes.TextElement):
     """Deprecated."""
 
+
 class acks(nodes.Element):
     """Special node for "acks" lists."""
 
+
 class hlist(nodes.Element):
     """Node for "horizontal lists", i.e. lists that should be compressed to
     take up less vertical space.
     """
 
+
 class hlistcol(nodes.Element):
     """Node for one column in a horizontal list."""
 
+
 class compact_paragraph(nodes.paragraph):
     """Node for a compact paragraph (which never makes a <p> node)."""
 
+
 class glossary(nodes.Element):
     """Node to insert a glossary."""
 
+
 class only(nodes.Element):
     """Node for "only" directives (conditional inclusion based on tags)."""
 
@@ -136,14 +156,17 @@ class only(nodes.Element):
 class start_of_file(nodes.Element):
     """Node to mark start of a new file, used in the LaTeX builder only."""
 
+
 class highlightlang(nodes.Element):
     """Inserted to set the highlight language and line number options for
     subsequent code blocks.
     """
 
+
 class tabular_col_spec(nodes.Element):
     """Node for specifying tabular columns, used for LaTeX output."""
 
+
 class meta(nodes.Special, nodes.PreBibliographic, nodes.Element):
     """Node for meta directive -- same as docutils' standard meta node,
     but pickleable.
@@ -160,22 +183,31 @@ class pending_xref(nodes.Inline, nodes.Element):
     BuildEnvironment.resolve_references.
     """
 
+
+class number_reference(nodes.reference):
+    """Node for number references, similar to pending_xref."""
+
+
 class download_reference(nodes.reference):
     """Node for download references, similar to pending_xref."""
 
+
 class literal_emphasis(nodes.emphasis):
     """Node that behaves like `emphasis`, but further text processors are not
     applied (e.g. smartypants for HTML output).
     """
|
||||||
|
|
||||||
class literal_strong(nodes.strong):
|
class literal_strong(nodes.strong):
|
||||||
"""Node that behaves like `strong`, but further text processors are not
|
"""Node that behaves like `strong`, but further text processors are not
|
||||||
applied (e.g. smartypants for HTML output).
|
applied (e.g. smartypants for HTML output).
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
class abbreviation(nodes.Inline, nodes.TextElement):
|
class abbreviation(nodes.Inline, nodes.TextElement):
|
||||||
"""Node for abbreviations with explanations."""
|
"""Node for abbreviations with explanations."""
|
||||||
|
|
||||||
|
|
||||||
class termsep(nodes.Structural, nodes.Element):
|
class termsep(nodes.Structural, nodes.Element):
|
||||||
"""Separates two terms within a <term> node."""
|
"""Separates two terms within a <term> node."""
|
||||||
|
|
||||||
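Aside (illustrative, not part of this commit): a custom node such as the new number_reference above is normally registered by an extension through app.add_node() together with per-format visitor functions. A minimal sketch, where the node and function names are placeholders rather than Sphinx's own:

# Hypothetical extension sketch modeled on number_reference.
from docutils import nodes


class my_numref(nodes.reference):
    """Example node, resolved later into a plain reference."""


def visit_my_numref(self, node):
    # emitted by the HTML translator; refuri is assumed to be set by a transform
    self.body.append(self.starttag(node, 'a', '', href=node.get('refuri', '#')))


def depart_my_numref(self, node):
    self.body.append('</a>')


def setup(app):
    app.add_node(my_numref, html=(visit_my_numref, depart_my_numref))
    return {'version': '0.1'}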
@@ -11,7 +11,7 @@
     Copyright 2008 Société des arts technologiques (SAT),
     http://www.sat.qc.ca/

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 from __future__ import print_function
@@ -88,7 +88,7 @@ def create_module_file(package, module, opts):
         text = format_heading(1, '%s module' % module)
     else:
         text = ''
-    #text += format_heading(2, ':mod:`%s` Module' % module)
+    # text += format_heading(2, ':mod:`%s` Module' % module)
     text += format_directive(module, package)
     write_file(makename(package, module), text, opts)

@@ -173,7 +173,7 @@ def shall_skip(module, opts):
     # skip if it has a "private" name and this is selected
     filename = path.basename(module)
     if filename != '__init__.py' and filename.startswith('_') and \
        not opts.includeprivate:
         return True
     return False

@@ -218,7 +218,7 @@ def recurse_tree(rootpath, excludes, opts):
         if is_pkg:
             # we are in a package with something to document
             if subs or len(py_files) > 1 or not \
                shall_skip(path.join(root, INITPY), opts):
                 subpackage = root[len(rootpath):].lstrip(path.sep).\
                     replace(path.sep, '.')
                 create_package_file(root, root_package, subpackage,
@@ -318,7 +318,7 @@ Note: By default this script will not overwrite already created files.""")
     (opts, args) = parser.parse_args(argv[1:])

     if opts.show_version:
         print('Sphinx (sphinx-apidoc) %s' % __version__)
         return 0

     if not args:
@@ -367,6 +367,7 @@ Note: By default this script will not overwrite already created files.""")
             batchfile = True,
             mastertocmaxdepth = opts.maxdepth,
             mastertoctree = text,
+            language = 'en',
         )
         if not opts.dryrun:
             qs.generate(d, silent=True, overwrite=opts.force)
@@ -7,7 +7,7 @@

     Gracefully adapted from the TextPress system by Armin.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 from __future__ import print_function
@@ -18,19 +18,20 @@ import types
 import posixpath
 import traceback
 from os import path
+from collections import deque

-from six import iteritems, itervalues
+from six import iteritems, itervalues, text_type
 from six.moves import cStringIO
 from docutils import nodes
 from docutils.parsers.rst import convert_directive_function, \
     directives, roles

 import sphinx
 from sphinx import package_dir, locale
 from sphinx.roles import XRefRole
 from sphinx.config import Config
 from sphinx.errors import SphinxError, SphinxWarning, ExtensionError, \
     VersionRequirementError, ConfigError
 from sphinx.domains import ObjType, BUILTIN_DOMAINS
 from sphinx.domains.std import GenericObject, Target, StandardDomain
 from sphinx.builders import BUILTIN_BUILDERS
@@ -38,7 +39,8 @@ from sphinx.environment import BuildEnvironment, SphinxStandaloneReader
 from sphinx.util import pycompat  # imported for side-effects
 from sphinx.util.tags import Tags
 from sphinx.util.osutil import ENOENT
-from sphinx.util.console import bold, lightgray, darkgray
+from sphinx.util.console import bold, lightgray, darkgray, darkgreen, \
+    term_width_line

 if hasattr(sys, 'intern'):
     intern = sys.intern
@@ -48,8 +50,10 @@ events = {
     'builder-inited': '',
     'env-get-outdated': 'env, added, changed, removed',
     'env-purge-doc': 'env, docname',
+    'env-before-read-docs': 'env, docnames',
     'source-read': 'docname, source text',
     'doctree-read': 'the doctree before being pickled',
+    'env-merge-info': 'env, read docnames, other env instance',
     'missing-reference': 'env, node, contnode',
     'doctree-resolved': 'doctree, docname',
     'env-updated': 'env',
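Aside (illustrative, not part of the commit): the two events added above can be hooked from an extension's setup(). A minimal sketch for 'env-before-read-docs', with handler and extension names chosen here for illustration:

# Sketch: reacting to the new 'env-before-read-docs' event.
def reorder_docs(app, env, docnames):
    # e.g. read source files in alphabetical order
    docnames.sort()


def setup(app):
    app.connect('env-before-read-docs', reorder_docs)
    return {'version': '0.1'}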
@@ -71,7 +75,7 @@ class Sphinx(object):
         self.verbosity = verbosity
         self.next_listener_id = 0
         self._extensions = {}
-        self._extension_versions = {}
+        self._extension_metadata = {}
         self._listeners = {}
         self.domains = BUILTIN_DOMAINS.copy()
         self.builderclasses = BUILTIN_BUILDERS.copy()
@@ -102,17 +106,25 @@ class Sphinx(object):
         self._events = events.copy()
         self._translators = {}

+        # keep last few messages for traceback
+        self.messagelog = deque(maxlen=10)
+
         # say hello to the world
         self.info(bold('Running Sphinx v%s' % sphinx.__version__))

         # status code for command-line application
         self.statuscode = 0

+        if not path.isdir(outdir):
+            self.info('making output directory...')
+            os.makedirs(outdir)
+
         # read config
         self.tags = Tags(tags)
         self.config = Config(confdir, CONFIG_FILENAME,
                              confoverrides or {}, self.tags)
         self.config.check_unicode(self.warn)
+        # defer checking types until i18n has been initialized

         # set confdir to srcdir if -C given (!= no confdir); a few pieces
         # of code expect a confdir to be set
@@ -124,7 +136,7 @@ class Sphinx(object):
             self.setup_extension(extension)
         # the config file itself can be an extension
         if self.config.setup:
-            # py31 doesn't have 'callable' function for bellow check
+            # py31 doesn't have 'callable' function for below check
             if hasattr(self.config.setup, '__call__'):
                 self.config.setup(self)
             else:
@@ -152,7 +164,7 @@ class Sphinx(object):
                     'version requirement for extension %s, but it is '
                     'not loaded' % extname)
                 continue
-            has_ver = self._extension_versions[extname]
+            has_ver = self._extension_metadata[extname]['version']
             if has_ver == 'unknown version' or needs_ver > has_ver:
                 raise VersionRequirementError(
                     'This project needs the extension %s at least in '
@@ -161,6 +173,8 @@ class Sphinx(object):

         # set up translation infrastructure
         self._init_i18n()
+        # check all configuration values for permissible types
+        self.config.check_types(self.warn)
         # set up the build environment
         self._init_env(freshenv)
         # set up the builder
@@ -196,15 +210,15 @@ class Sphinx(object):
         else:
             try:
                 self.info(bold('loading pickled environment... '), nonl=True)
-                self.env = BuildEnvironment.frompickle(self.config,
-                    path.join(self.doctreedir, ENV_PICKLE_FILENAME))
+                self.env = BuildEnvironment.frompickle(
+                    self.config, path.join(self.doctreedir, ENV_PICKLE_FILENAME))
                 self.env.domains = {}
                 for domain in self.domains.keys():
                     # this can raise if the data version doesn't fit
                     self.env.domains[domain] = self.domains[domain](self.env)
                 self.info('done')
             except Exception as err:
-                if type(err) is IOError and err.errno == ENOENT:
+                if isinstance(err, IOError) and err.errno == ENOENT:
                     self.info('not yet created')
                 else:
                     self.info('failed: %s' % err)
@@ -241,6 +255,15 @@ class Sphinx(object):
             else:
                 self.builder.compile_update_catalogs()
                 self.builder.build_update()
+
+            status = (self.statuscode == 0
+                      and 'succeeded' or 'finished with problems')
+            if self._warncount:
+                self.info(bold('build %s, %s warning%s.' %
+                               (status, self._warncount,
+                                self._warncount != 1 and 's' or '')))
+            else:
+                self.info(bold('build %s.' % status))
         except Exception as err:
             # delete the saved env to force a fresh build next time
             envfile = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
@@ -264,6 +287,7 @@ class Sphinx(object):
             wfile.write('\n')
             if hasattr(wfile, 'flush'):
                 wfile.flush()
+        self.messagelog.append(message)

     def warn(self, message, location=None, prefix='WARNING: '):
         """Emit a warning.
@@ -286,7 +310,7 @@ class Sphinx(object):
         else:
             location = None
         warntext = location and '%s: %s%s\n' % (location, prefix, message) or \
             '%s%s\n' % (prefix, message)
         if self.warningiserror:
             raise SphinxWarning(warntext)
         self._warncount += 1
@@ -345,6 +369,48 @@ class Sphinx(object):
         message = message % (args or kwargs)
         self._log(lightgray(message), self._status)

+    def _display_chunk(chunk):
+        if isinstance(chunk, (list, tuple)):
+            if len(chunk) == 1:
+                return text_type(chunk[0])
+            return '%s .. %s' % (chunk[0], chunk[-1])
+        return text_type(chunk)
+
+    def old_status_iterator(self, iterable, summary, colorfunc=darkgreen,
+                            stringify_func=_display_chunk):
+        l = 0
+        for item in iterable:
+            if l == 0:
+                self.info(bold(summary), nonl=1)
+                l = 1
+            self.info(colorfunc(stringify_func(item)) + ' ', nonl=1)
+            yield item
+        if l == 1:
+            self.info()
+
+    # new version with progress info
+    def status_iterator(self, iterable, summary, colorfunc=darkgreen, length=0,
+                        stringify_func=_display_chunk):
+        if length == 0:
+            for item in self.old_status_iterator(iterable, summary, colorfunc,
+                                                 stringify_func):
+                yield item
+            return
+        l = 0
+        summary = bold(summary)
+        for item in iterable:
+            l += 1
+            s = '%s[%3d%%] %s' % (summary, 100*l/length,
+                                  colorfunc(stringify_func(item)))
+            if self.verbosity:
+                s += '\n'
+            else:
+                s = term_width_line(s)
+            self.info(s, nonl=1)
+            yield item
+        if l > 0:
+            self.info()
+
     # ---- general extensibility interface -------------------------------------

     def setup_extension(self, extension):
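Aside (illustrative): with the iterators now living on the application object, builder code typically drives them like the sketch below; 'docnames' stands in for whatever sequence a builder is processing, and the compatibility aliases added later in the Builder base class keep older call sites working.

# Sketch of builder-side usage of app.status_iterator (method-body fragment).
for docname in self.app.status_iterator(docnames, 'writing output... ',
                                         darkgreen, len(docnames)):
    doctree = self.env.get_and_resolve_doctree(docname, self)
    self.write_doc(docname, doctree)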
@@ -361,20 +427,22 @@ class Sphinx(object):
         if not hasattr(mod, 'setup'):
             self.warn('extension %r has no setup() function; is it really '
                       'a Sphinx extension module?' % extension)
-            version = None
+            ext_meta = None
         else:
             try:
-                version = mod.setup(self)
+                ext_meta = mod.setup(self)
             except VersionRequirementError as err:
                 # add the extension name to the version required
                 raise VersionRequirementError(
                     'The %s extension used by this project needs at least '
                     'Sphinx v%s; it therefore cannot be built with this '
                     'version.' % (extension, err))
-        if version is None:
-            version = 'unknown version'
+        if ext_meta is None:
+            ext_meta = {}
+        if not ext_meta.get('version'):
+            ext_meta['version'] = 'unknown version'
         self._extensions[extension] = mod
-        self._extension_versions[extension] = version
+        self._extension_metadata[extension] = ext_meta

     def require_sphinx(self, version):
         # check the Sphinx version if requested
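Aside: with this change an extension's setup() is expected to return a metadata dict rather than a bare version string. A minimal sketch of what such a return value looks like (the field values here are examples, and the parallel flags are the capability hints consulted by the builder code further below):

# Example extension setup() returning the new metadata dict.
def setup(app):
    app.add_config_value('example_flag', False, 'env')  # illustrative config value
    return {
        'version': '1.0',             # used for extension version requirement checks
        'parallel_read_safe': True,   # example capability declarations
        'parallel_write_safe': True,  # False forces the serial write path
    }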
@@ -456,7 +524,7 @@ class Sphinx(object):
         else:
             raise ExtensionError(
                 'Builder %r already exists (in module %s)' % (
                     builder.name, self.builderclasses[builder.name].__module__))
         self.builderclasses[builder.name] = builder

     def add_config_value(self, name, default, rebuild):
@@ -629,6 +697,11 @@ class Sphinx(object):
         StandaloneHTMLBuilder.css_files.append(
             posixpath.join('_static', filename))

+    def add_latex_package(self, packagename, options=None):
+        self.debug('[app] adding latex package: %r', packagename)
+        from sphinx.builders.latex import LaTeXBuilder
+        LaTeXBuilder.usepackages.append((packagename, options))
+
     def add_lexer(self, alias, lexer):
         self.debug('[app] adding lexer: %r', (alias, lexer))
         from sphinx.highlighting import lexers
@@ -650,7 +723,7 @@ class Sphinx(object):
     def add_search_language(self, cls):
         self.debug('[app] adding search language: %r', cls)
         from sphinx.search import languages, SearchLanguage
-        assert isinstance(cls, SearchLanguage)
+        assert issubclass(cls, SearchLanguage)
         languages[cls.lang] = cls

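Aside (illustrative): the new add_latex_package() API above can be called from an extension or from a setup() function in conf.py. A short sketch, with package names and options chosen only as examples:

# Sketch: asking the LaTeX builder to emit \usepackage lines.
def setup(app):
    app.add_latex_package('tikz')                     # -> \usepackage{tikz}
    app.add_latex_package('geometry', 'margin=1in')   # -> \usepackage[margin=1in]{geometry}
    return {'version': '0.1'}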
@@ -5,7 +5,7 @@

     Builder superclass for all builders.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -22,7 +22,9 @@ from docutils import nodes

 from sphinx.util import i18n, path_stabilize
 from sphinx.util.osutil import SEP, relative_uri, find_catalog
-from sphinx.util.console import bold, purple, darkgreen, term_width_line
+from sphinx.util.console import bold, darkgreen
+from sphinx.util.parallel import ParallelTasks, SerialTasks, make_chunks, \
+    parallel_available

 # side effect: registers roles and directives
 from sphinx import roles
@@ -40,12 +42,14 @@ class Builder(object):
     format = ''
     # doctree versioning method
     versioning_method = 'none'
+    versioning_compare = False
     # allow parallel write_doc() calls
     allow_parallel = False

     def __init__(self, app):
         self.env = app.env
-        self.env.set_versioning_method(self.versioning_method)
+        self.env.set_versioning_method(self.versioning_method,
+                                       self.versioning_compare)
         self.srcdir = app.srcdir
         self.confdir = app.confdir
         self.outdir = app.outdir
@@ -62,9 +66,20 @@ class Builder(object):
         self.tags.add(self.name)
         self.tags.add("format_%s" % self.format)
         self.tags.add("builder_%s" % self.name)
+        # compatibility aliases
+        self.status_iterator = app.status_iterator
+        self.old_status_iterator = app.old_status_iterator

         # images that need to be copied over (source -> dest)
         self.images = {}
+        # basename of images directory
+        self.imagedir = ""
+        # relative path to image directory from current docname (used at writing docs)
+        self.imgpath = ""
+
+        # these get set later
+        self.parallel_ok = False
+        self.finish_tasks = None

         # load default translator class
         self.translator_class = app._translators.get(self.name)
@@ -113,41 +128,6 @@ class Builder(object):
         """
         raise NotImplementedError

-    def old_status_iterator(self, iterable, summary, colorfunc=darkgreen,
-                            stringify_func=lambda x: x):
-        l = 0
-        for item in iterable:
-            if l == 0:
-                self.info(bold(summary), nonl=1)
-                l = 1
-            self.info(colorfunc(stringify_func(item)) + ' ', nonl=1)
-            yield item
-        if l == 1:
-            self.info()
-
-    # new version with progress info
-    def status_iterator(self, iterable, summary, colorfunc=darkgreen, length=0,
-                        stringify_func=lambda x: x):
-        if length == 0:
-            for item in self.old_status_iterator(iterable, summary, colorfunc,
-                                                 stringify_func):
-                yield item
-            return
-        l = 0
-        summary = bold(summary)
-        for item in iterable:
-            l += 1
-            s = '%s[%3d%%] %s' % (summary, 100*l/length,
-                                  colorfunc(stringify_func(item)))
-            if self.app.verbosity:
-                s += '\n'
-            else:
-                s = term_width_line(s)
-            self.info(s, nonl=1)
-            yield item
-        if l > 0:
-            self.info()
-
     supported_image_types = []

     def post_process_images(self, doctree):
@@ -179,9 +159,8 @@ class Builder(object):
     def compile_catalogs(self, catalogs, message):
         if not self.config.gettext_auto_build:
             return
-        self.info(bold('building [mo]: '), nonl=1)
-        self.info(message)
-        for catalog in self.status_iterator(
+        self.info(bold('building [mo]: ') + message)
+        for catalog in self.app.status_iterator(
                 catalogs, 'writing output... ', darkgreen, len(catalogs),
                 lambda c: c.mo_path):
             catalog.write_mo(self.config.language)
@@ -263,25 +242,16 @@ class Builder(object):
         First updates the environment, and then calls :meth:`write`.
         """
         if summary:
-            self.info(bold('building [%s]: ' % self.name), nonl=1)
-            self.info(summary)
-
-        updated_docnames = set()
+            self.info(bold('building [%s]' % self.name) + ': ' + summary)
+
         # while reading, collect all warnings from docutils
         warnings = []
         self.env.set_warnfunc(lambda *args: warnings.append(args))
-        self.info(bold('updating environment: '), nonl=1)
-        msg, length, iterator = self.env.update(self.config, self.srcdir,
-                                                self.doctreedir, self.app)
-        self.info(msg)
-        for docname in self.status_iterator(iterator, 'reading sources... ',
-                                            purple, length):
-            updated_docnames.add(docname)
-            # nothing further to do, the environment has already
-            # done the reading
+        updated_docnames = set(self.env.update(self.config, self.srcdir,
+                                               self.doctreedir, self.app))
+        self.env.set_warnfunc(self.warn)
         for warning in warnings:
             self.warn(*warning)
-        self.env.set_warnfunc(self.warn)

         doccount = len(updated_docnames)
         self.info(bold('looking for now-outdated files... '), nonl=1)
@@ -315,20 +285,33 @@ class Builder(object):
         if docnames and docnames != ['__all__']:
             docnames = set(docnames) & self.env.found_docs

-        # another indirection to support builders that don't build
-        # files individually
+        # determine if we can write in parallel
+        self.parallel_ok = False
+        if parallel_available and self.app.parallel > 1 and self.allow_parallel:
+            self.parallel_ok = True
+            for extname, md in self.app._extension_metadata.items():
+                par_ok = md.get('parallel_write_safe', True)
+                if not par_ok:
+                    self.app.warn('the %s extension is not safe for parallel '
+                                  'writing, doing serial write' % extname)
+                    self.parallel_ok = False
+                    break
+
+        # create a task executor to use for misc. "finish-up" tasks
+        # if self.parallel_ok:
+        #     self.finish_tasks = ParallelTasks(self.app.parallel)
+        # else:
+        # for now, just execute them serially
+        self.finish_tasks = SerialTasks()
+
+        # write all "normal" documents (or everything for some builders)
         self.write(docnames, list(updated_docnames), method)

         # finish (write static files etc.)
         self.finish()
-        status = (self.app.statuscode == 0 and 'succeeded'
-                  or 'finished with problems')
-        if self.app._warncount:
-            self.info(bold('build %s, %s warning%s.' %
-                           (status, self.app._warncount,
-                            self.app._warncount != 1 and 's' or '')))
-        else:
-            self.info(bold('build %s.' % status))
+
+        # wait for all tasks
+        self.finish_tasks.join()

     def write(self, build_docnames, updated_docnames, method='update'):
         if build_docnames is None or build_docnames == ['__all__']:
@@ -354,23 +337,17 @@ class Builder(object):

         warnings = []
         self.env.set_warnfunc(lambda *args: warnings.append(args))
-        # check for prerequisites to parallel build
-        # (parallel only works on POSIX, because the forking impl of
-        # multiprocessing is required)
-        if not (multiprocessing and
-                self.app.parallel > 1 and
-                self.allow_parallel and
-                os.name == 'posix'):
-            self._write_serial(sorted(docnames), warnings)
-        else:
+        if self.parallel_ok:
             # number of subprocesses is parallel-1 because the main process
             # is busy loading doctrees and doing write_doc_serialized()
             self._write_parallel(sorted(docnames), warnings,
                                  nproc=self.app.parallel - 1)
+        else:
+            self._write_serial(sorted(docnames), warnings)
         self.env.set_warnfunc(self.warn)

     def _write_serial(self, docnames, warnings):
-        for docname in self.status_iterator(
+        for docname in self.app.status_iterator(
                 docnames, 'writing output... ', darkgreen, len(docnames)):
             doctree = self.env.get_and_resolve_doctree(docname, self)
             self.write_doc_serialized(docname, doctree)
@@ -380,60 +357,39 @@ class Builder(object):

     def _write_parallel(self, docnames, warnings, nproc):
         def write_process(docs):
-            try:
-                for docname, doctree in docs:
-                    self.write_doc(docname, doctree)
-            except KeyboardInterrupt:
-                pass  # do not print a traceback on Ctrl-C
-            finally:
-                for warning in warnings:
-                    self.warn(*warning)
+            local_warnings = []
+            self.env.set_warnfunc(lambda *args: local_warnings.append(args))
+            for docname, doctree in docs:
+                self.write_doc(docname, doctree)
+            return local_warnings

-        def process_thread(docs):
-            p = multiprocessing.Process(target=write_process, args=(docs,))
-            p.start()
-            p.join()
-            semaphore.release()
-
-        # allow only "nproc" worker processes at once
-        semaphore = threading.Semaphore(nproc)
-        # list of threads to join when waiting for completion
-        threads = []
+        def add_warnings(docs, wlist):
+            warnings.extend(wlist)

         # warm up caches/compile templates using the first document
         firstname, docnames = docnames[0], docnames[1:]
         doctree = self.env.get_and_resolve_doctree(firstname, self)
         self.write_doc_serialized(firstname, doctree)
         self.write_doc(firstname, doctree)
-        # for the rest, determine how many documents to write in one go
-        ndocs = len(docnames)
-        chunksize = min(ndocs // nproc, 10)
-        if chunksize == 0:
-            chunksize = 1
-        nchunks, rest = divmod(ndocs, chunksize)
-        if rest:
-            nchunks += 1
-        # partition documents in "chunks" that will be written by one Process
-        chunks = [docnames[i*chunksize:(i+1)*chunksize] for i in range(nchunks)]
-        for docnames in self.status_iterator(
-                chunks, 'writing output... ', darkgreen, len(chunks),
-                lambda chk: '%s .. %s' % (chk[0], chk[-1])):
-            docs = []
-            for docname in docnames:
-                doctree = self.env.get_and_resolve_doctree(docname, self)
-                self.write_doc_serialized(docname, doctree)
-                docs.append((docname, doctree))
-            # start a new thread to oversee the completion of this chunk
-            semaphore.acquire()
-            t = threading.Thread(target=process_thread, args=(docs,))
-            t.setDaemon(True)
-            t.start()
-            threads.append(t)
+
+        tasks = ParallelTasks(nproc)
+        chunks = make_chunks(docnames, nproc)
+        for chunk in self.app.status_iterator(
+                chunks, 'writing output... ', darkgreen, len(chunks)):
+            arg = []
+            for i, docname in enumerate(chunk):
+                doctree = self.env.get_and_resolve_doctree(docname, self)
+                self.write_doc_serialized(docname, doctree)
+                arg.append((docname, doctree))
+            tasks.add_task(write_process, arg, add_warnings)

         # make sure all threads have finished
-        self.info(bold('waiting for workers... '))#, nonl=True)
-        for t in threads:
-            t.join()
+        self.info(bold('waiting for workers...'))
+        tasks.join()
+
+        for warning in warnings:
+            self.warn(*warning)

     def prepare_writing(self, docnames):
         """A place where you can add logic before :meth:`write_doc` is run"""
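Aside: judging only from the calls visible in this hunk, the sphinx.util.parallel helpers are driven roughly as sketched below; this is an illustration of the interface as used here, not a full description of that module.

# Sketch of the task API as used above (method-body fragment):
# add_task(func, arg, result_callback), then join() to wait for workers.
tasks = ParallelTasks(nproc)            # or SerialTasks() when parallelism is off
for chunk in make_chunks(docnames, nproc):
    tasks.add_task(write_process, chunk, add_warnings)
tasks.join()                            # blocks until every chunk is written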
@@ -5,7 +5,7 @@

     Changelog builder.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -130,6 +130,9 @@ class ChangesBuilder(Builder):
                      self.env.config.source_encoding)
             try:
                 lines = f.readlines()
+            except UnicodeDecodeError:
+                self.warn('could not read %r for changelog creation' % docname)
+                continue
             finally:
                 f.close()
             targetfn = path.join(self.outdir, 'rst', os_path(docname)) + '.html'
@@ -7,7 +7,7 @@

     .. _Devhelp: http://live.gnome.org/devhelp

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 from __future__ import absolute_import
@@ -6,7 +6,7 @@
     Build epub files.
     Originally derived from qthelp.py.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -404,9 +404,9 @@ class EpubBuilder(StandaloneHTMLBuilder):
         The method tries to read and write the files with the PIL,
         converting the format and resizing the image if necessary/possible.
         """
-        ensuredir(path.join(self.outdir, '_images'))
-        for src in self.status_iterator(self.images, 'copying images... ',
+        ensuredir(path.join(self.outdir, self.imagedir))
+        for src in self.app.status_iterator(self.images, 'copying images... ',
                                             brown, len(self.images)):
             dest = self.images[src]
             try:
                 img = Image.open(path.join(self.srcdir, src))
@@ -416,7 +416,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
                           (path.join(self.srcdir, src), ))
                 try:
                     copyfile(path.join(self.srcdir, src),
-                             path.join(self.outdir, '_images', dest))
+                             path.join(self.outdir, self.imagedir, dest))
                 except (IOError, OSError) as err:
                     self.warn('cannot copy image file %r: %s' %
                               (path.join(self.srcdir, src), err))
@@ -432,7 +432,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
                 nh = (height * nw) / width
                 img = img.resize((nw, nh), Image.BICUBIC)
             try:
-                img.save(path.join(self.outdir, '_images', dest))
+                img.save(path.join(self.outdir, self.imagedir, dest))
             except (IOError, OSError) as err:
                 self.warn('cannot write image file %r: %s' %
                           (path.join(self.srcdir, src), err))
@@ -5,11 +5,10 @@

     The MessageCatalogBuilder class.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

-from __future__ import with_statement
 from __future__ import unicode_literals

 from os import path, walk
@@ -85,6 +84,11 @@ class I18nBuilder(Builder):
     """
     name = 'i18n'
     versioning_method = 'text'
+    versioning_compare = None  # be set by `gettext_uuid`
+
+    def __init__(self, app):
+        self.versioning_compare = app.env.config.gettext_uuid
+        super(I18nBuilder, self).__init__(app)

     def init(self):
         Builder.init(self)
@@ -109,15 +113,16 @@ class I18nBuilder(Builder):
         for node, msg in extract_messages(doctree):
             catalog.add(msg, node)

-        # Extract translatable messages from index entries.
-        for node, entries in traverse_translatable_index(doctree):
-            for typ, msg, tid, main in entries:
-                for m in split_index_msg(typ, msg):
-                    if typ == 'pair' and m in pairindextypes.values():
-                        # avoid built-in translated message was incorporated
-                        # in 'sphinx.util.nodes.process_index_entry'
-                        continue
-                    catalog.add(m, node)
+        if 'index' in self.env.config.gettext_enables:
+            # Extract translatable messages from index entries.
+            for node, entries in traverse_translatable_index(doctree):
+                for typ, msg, tid, main in entries:
+                    for m in split_index_msg(typ, msg):
+                        if typ == 'pair' and m in pairindextypes.values():
+                            # avoid built-in translated message was incorporated
+                            # in 'sphinx.util.nodes.process_index_entry'
+                            continue
+                        catalog.add(m, node)


 # determine tzoffset once to remain unaffected by DST change during build
@@ -170,8 +175,8 @@ class MessageCatalogBuilder(I18nBuilder):

         extract_translations = self.templates.environment.extract_translations

-        for template in self.status_iterator(files,
-                'reading templates... ', purple, len(files)):
+        for template in self.app.status_iterator(
+                files, 'reading templates... ', purple, len(files)):
             with open(template, 'r', encoding='utf-8') as f:
                 context = f.read()
             for line, meth, msg in extract_translations(context):
@@ -191,7 +196,7 @@ class MessageCatalogBuilder(I18nBuilder):
             ctime = datetime.fromtimestamp(
                 timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
         )
-        for textdomain, catalog in self.status_iterator(
+        for textdomain, catalog in self.app.status_iterator(
                 iteritems(self.catalogs), "writing message catalogs... ",
                 darkgreen, len(self.catalogs),
                 lambda textdomain__: textdomain__[0]):
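Aside (illustrative, values are examples only): the two config values consulted by the builder above would be set in a project's conf.py roughly like this; gettext_uuid drives the new versioning_compare flag and gettext_enables controls which extra targets (such as index entries) are extracted.

# conf.py sketch for the gettext builder options read in this change.
gettext_uuid = True            # enables doctree versioning/comparison
gettext_enables = ['index']    # also extract messages from index entries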
@@ -5,7 +5,7 @@

     Several HTML builders.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -17,7 +17,7 @@ import posixpath
 from os import path
 from hashlib import md5

-from six import iteritems, itervalues, text_type, string_types
+from six import iteritems, text_type, string_types
 from six.moves import cPickle as pickle
 from docutils import nodes
 from docutils.io import DocTreeInput, StringOutput
@@ -29,7 +29,7 @@ from docutils.readers.doctree import Reader as DoctreeReader
 from sphinx import package_dir, __version__
 from sphinx.util import jsonimpl, copy_static_entry
 from sphinx.util.osutil import SEP, os_path, relative_uri, ensuredir, \
     movefile, ustrftime, copyfile
 from sphinx.util.nodes import inline_all_toctrees
 from sphinx.util.matching import patmatch, compile_matchers
 from sphinx.locale import _
@@ -40,7 +40,7 @@ from sphinx.application import ENV_PICKLE_FILENAME
 from sphinx.highlighting import PygmentsBridge
 from sphinx.util.console import bold, darkgreen, brown
 from sphinx.writers.html import HTMLWriter, HTMLTranslator, \
     SmartyPantsHTMLTranslator

 #: the filename for the inventory of objects
 INVENTORY_FILENAME = 'objects.inv'
@@ -95,6 +95,8 @@ class StandaloneHTMLBuilder(Builder):
         # a hash of all config values that, if changed, cause a full rebuild
         self.config_hash = ''
         self.tags_hash = ''
+        # basename of images directory
+        self.imagedir = '_images'
         # section numbers for headings in the currently visited document
         self.secnumbers = {}
         # currently written docname
@@ -266,7 +268,8 @@ class StandaloneHTMLBuilder(Builder):
         # html_domain_indices can be False/True or a list of index names
         indices_config = self.config.html_domain_indices
         if indices_config:
-            for domain in itervalues(self.env.domains):
+            for domain_name in sorted(self.env.domains):
+                domain = self.env.domains[domain_name]
                 for indexcls in domain.indices:
                     indexname = '%s-%s' % (domain.name, indexcls.name)
                     if isinstance(indices_config, list):
@@ -335,6 +338,7 @@ class StandaloneHTMLBuilder(Builder):
             show_source = self.config.html_show_sourcelink,
             file_suffix = self.out_suffix,
             script_files = self.script_files,
+            language = self.config.language,
             css_files = self.css_files,
             sphinx_version = __version__,
             style = stylename,
@@ -424,6 +428,7 @@ class StandaloneHTMLBuilder(Builder):
         doctree.settings = self.docsettings

         self.secnumbers = self.env.toc_secnumbers.get(docname, {})
+        self.fignumbers = self.env.toc_fignumbers.get(docname, {})
         self.imgpath = relative_uri(self.get_target_uri(docname), '_images')
         self.dlpath = relative_uri(self.get_target_uri(docname), '_downloads')
         self.current_docname = docname
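Aside: the toc_fignumbers lookup added above feeds the figure-numbering machinery that the new number_reference node serves. On the project side this is the numfig/numref feature; a minimal, illustrative conf.py fragment:

# conf.py sketch: enable figure numbering so that :numref: cross-references
# (resolved through number_reference nodes) pick up figure numbers.
numfig = True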
@@ -436,19 +441,26 @@ class StandaloneHTMLBuilder(Builder):
         self.handle_page(docname, ctx, event_arg=doctree)

     def write_doc_serialized(self, docname, doctree):
-        self.imgpath = relative_uri(self.get_target_uri(docname), '_images')
+        self.imgpath = relative_uri(self.get_target_uri(docname), self.imagedir)
         self.post_process_images(doctree)
         title = self.env.longtitles.get(docname)
         title = title and self.render_partial(title)['title'] or ''
         self.index_page(docname, doctree, title)

     def finish(self):
-        self.info(bold('writing additional files...'), nonl=1)
-
-        # pages from extensions
-        for pagelist in self.app.emit('html-collect-pages'):
-            for pagename, context, template in pagelist:
-                self.handle_page(pagename, context, template)
+        self.finish_tasks.add_task(self.gen_indices)
+        self.finish_tasks.add_task(self.gen_additional_pages)
+        self.finish_tasks.add_task(self.copy_image_files)
+        self.finish_tasks.add_task(self.copy_download_files)
+        self.finish_tasks.add_task(self.copy_static_files)
+        self.finish_tasks.add_task(self.copy_extra_files)
+        self.finish_tasks.add_task(self.write_buildinfo)
+
+        # dump the search index
+        self.handle_finish()
+
+    def gen_indices(self):
+        self.info(bold('generating indices...'), nonl=1)

         # the global general index
         if self.get_builder_config('use_index', 'html'):
@@ -457,16 +469,27 @@ class StandaloneHTMLBuilder(Builder):
         # the global domain-specific indices
         self.write_domain_indices()

-        # the search page
-        if self.name != 'htmlhelp':
-            self.info(' search', nonl=1)
-            self.handle_page('search', {}, 'search.html')
+        self.info()
+
+    def gen_additional_pages(self):
+        # pages from extensions
+        for pagelist in self.app.emit('html-collect-pages'):
+            for pagename, context, template in pagelist:
+                self.handle_page(pagename, context, template)
+
+        self.info(bold('writing additional pages...'), nonl=1)

         # additional pages from conf.py
         for pagename, template in self.config.html_additional_pages.items():
             self.info(' '+pagename, nonl=1)
             self.handle_page(pagename, {}, template)

+        # the search page
+        if self.name != 'htmlhelp':
+            self.info(' search', nonl=1)
+            self.handle_page('search', {}, 'search.html')
+
+        # the opensearch xml file
         if self.config.html_use_opensearch and self.name != 'htmlhelp':
             self.info(' opensearch', nonl=1)
             fn = path.join(self.outdir, '_static', 'opensearch.xml')
@@ -474,15 +497,6 @@ class StandaloneHTMLBuilder(Builder):

         self.info()

-        self.copy_image_files()
-        self.copy_download_files()
-        self.copy_static_files()
-        self.copy_extra_files()
-        self.write_buildinfo()
-
-        # dump the search index
-        self.handle_finish()
-
     def write_genindex(self):
         # the total count of lines for each index letter, used to distribute
         # the entries into two columns
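Aside (illustrative): gen_additional_pages() above collects extra pages from the html-collect-pages event; an extension can contribute pages along these lines, where the page name, context, and template are placeholders:

# Sketch: contributing extra HTML pages via 'html-collect-pages'.
def collect_pages(app):
    # must yield (pagename, context, templatename) tuples
    yield ('example-extra', {'body': '<p>generated page</p>'}, 'page.html')


def setup(app):
    app.connect('html-collect-pages', collect_pages)
    return {'version': '0.1'}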
@@ -525,13 +539,13 @@ class StandaloneHTMLBuilder(Builder):
     def copy_image_files(self):
         # copy image files
         if self.images:
-            ensuredir(path.join(self.outdir, '_images'))
-            for src in self.status_iterator(self.images, 'copying images... ',
+            ensuredir(path.join(self.outdir, self.imagedir))
+            for src in self.app.status_iterator(self.images, 'copying images... ',
                                                 brown, len(self.images)):
                 dest = self.images[src]
                 try:
                     copyfile(path.join(self.srcdir, src),
-                             path.join(self.outdir, '_images', dest))
+                             path.join(self.outdir, self.imagedir, dest))
                 except Exception as err:
                     self.warn('cannot copy image file %r: %s' %
                               (path.join(self.srcdir, src), err))
@@ -540,9 +554,9 @@ class StandaloneHTMLBuilder(Builder):
         # copy downloadable files
         if self.env.dlfiles:
             ensuredir(path.join(self.outdir, '_downloads'))
-            for src in self.status_iterator(self.env.dlfiles,
+            for src in self.app.status_iterator(self.env.dlfiles,
                                                 'copying downloadable files... ',
                                                 brown, len(self.env.dlfiles)):
                 dest = self.env.dlfiles[src][1]
                 try:
                     copyfile(path.join(self.srcdir, src),
@@ -755,8 +769,10 @@ class StandaloneHTMLBuilder(Builder):
         self.add_sidebars(pagename, ctx)
         ctx.update(addctx)

-        self.app.emit('html-page-context', pagename, templatename,
-                      ctx, event_arg)
+        newtmpl = self.app.emit_firstresult('html-page-context', pagename,
+                                            templatename, ctx, event_arg)
+        if newtmpl:
+            templatename = newtmpl

         try:
             output = self.templates.render(templatename, ctx)
@@ -786,8 +802,8 @@ class StandaloneHTMLBuilder(Builder):
             copyfile(self.env.doc2path(pagename), source_name)

     def handle_finish(self):
-        self.dump_search_index()
-        self.dump_inventory()
+        self.finish_tasks.add_task(self.dump_search_index)
+        self.finish_tasks.add_task(self.dump_inventory)

     def dump_inventory(self):
         self.info(bold('dumping object inventory... '), nonl=True)
@@ -802,7 +818,7 @@ class StandaloneHTMLBuilder(Builder):
         compressor = zlib.compressobj(9)
         for domainname, domain in iteritems(self.env.domains):
             for name, dispname, type, docname, anchor, prio in \
-                    domain.get_objects():
+                    sorted(domain.get_objects()):
                 if anchor.endswith(name):
                     # this can shorten the inventory by as much as 25%
                     anchor = anchor[:-len(name)] + '$'
@@ -1019,6 +1035,7 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
     def init(self):
         self.config_hash = ''
         self.tags_hash = ''
+        self.imagedir = '_images'
         self.theme = None  # no theme necessary
         self.templates = None  # no template bridge necessary
         self.init_translator_class()
@@ -1050,8 +1067,9 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
         outfilename = path.join(self.outdir,
                                 os_path(pagename) + self.out_suffix)

-        self.app.emit('html-page-context', pagename, templatename,
-                      ctx, event_arg)
+        # we're not taking the return value here, since no template is
+        # actually rendered
+        self.app.emit('html-page-context', pagename, templatename, ctx, event_arg)
|
||||||
|
|
||||||
ensuredir(path.dirname(outfilename))
|
ensuredir(path.dirname(outfilename))
|
||||||
self.dump_context(ctx, outfilename)
|
self.dump_context(ctx, outfilename)
|
||||||
|
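The HTML builder hunks above route the 'html-page-context' event through emit_firstresult(), so a handler may now return a replacement template name, and they add a new 'html-collect-pages' event whose handlers contribute extra pages. A minimal extension sketch using both events; the page and template names are made up for illustration::

    def change_template(app, pagename, templatename, context, doctree):
        # A 'html-page-context' handler may now return a template name; the
        # first non-None result replaces the template used for this page.
        if pagename == 'index':
            return 'custom-index.html'

    def collect_pages(app):
        # A 'html-collect-pages' handler yields (pagename, context, template)
        # tuples; the builder renders each one with handle_page().
        yield ('hello', {'body': '<p>Hello, world</p>'}, 'page.html')

    def setup(app):
        app.connect('html-page-context', change_template)
        app.connect('html-collect-pages', collect_pages)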
@@ -6,7 +6,7 @@
     Build HTML help support files.
     Parts adapted from Python's Doc/tools/prechm.py.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 from __future__ import print_function
@@ -5,7 +5,7 @@
 
     LaTeX builder.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -37,6 +37,7 @@ class LaTeXBuilder(Builder):
     format = 'latex'
     supported_image_types = ['application/pdf', 'image/png',
                              'image/gif', 'image/jpeg']
+    usepackages = []
 
     def init(self):
         self.docnames = []
@@ -5,7 +5,7 @@
 
     The CheckExternalLinksBuilder class.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
 """
 
@@ -122,7 +122,7 @@ class CheckExternalLinksBuilder(Builder):
             elif not (uri[0:5] == 'http:' or uri[0:6] == 'https:'):
                 return 'local', '', 0
             elif uri in self.good:
-                return 'working', '', 0
+                return 'working', 'old', 0
             elif uri in self.broken:
                 return 'broken', self.broken[uri], 0
             elif uri in self.redirected:
@@ -166,13 +166,20 @@ class CheckExternalLinksBuilder(Builder):
                    req = Request(req_url)
                    f = opener.open(req, **kwargs)
                    f.close()
+            except HTTPError as err:
+                if err.code == 401:
+                    # We'll take "Unauthorized" as working.
+                    self.good.add(uri)
+                    return 'working', ' - unauthorized', 0
+                else:
+                    self.broken[uri] = str(err)
+                    return 'broken', str(err), 0
             except Exception as err:
                 self.broken[uri] = str(err)
                 return 'broken', str(err), 0
             if f.url.rstrip('/') == req_url.rstrip('/'):
                 self.good.add(uri)
-                return 'working', 'new', 0
+                return 'working', '', 0
             else:
                 new_url = f.url
                 if hash:
@@ -192,7 +199,7 @@ class CheckExternalLinksBuilder(Builder):
         uri, docname, lineno, status, info, code = result
         if status == 'unchecked':
             return
-        if status == 'working' and info != 'new':
+        if status == 'working' and info == 'old':
             return
         if lineno:
             self.info('(line %4d) ' % lineno, nonl=1)
@@ -202,7 +209,7 @@ class CheckExternalLinksBuilder(Builder):
             self.info(darkgray('-local-   ') + uri)
             self.write_entry('local', docname, lineno, uri)
         elif status == 'working':
-            self.info(darkgreen('ok        ') + uri)
+            self.info(darkgreen('ok        ') + uri + info)
         elif status == 'broken':
             self.info(red('broken    ') + uri + red(' - ' + info))
             self.write_entry('broken', docname, lineno, uri + ': ' + info)
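The linkcheck hunks change the cache and reporting protocol: URIs already known to be good now come back as ('working', 'old', 0) and are skipped when printing, and an HTTP 401 response is counted as reachable. A tiny stand-alone restatement of that error mapping; the helper name is illustrative and not part of the patch::

    def classify_http_error(code, message):
        # Mirrors the new except-HTTPError branch: a 401 means the server is
        # reachable but wants credentials, so the link is treated as working.
        if code == 401:
            return 'working', ' - unauthorized', 0
        return 'broken', message, 0

    print(classify_http_error(401, 'Unauthorized'))  # ('working', ' - unauthorized', 0)
    print(classify_http_error(404, 'Not Found'))     # ('broken', 'Not Found', 0)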
@@ -5,7 +5,7 @@
 
     Manual pages builder.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -5,7 +5,7 @@
 
     Build input files for the Qt collection generator.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -157,7 +157,7 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
         olen = len(outdir)
         projectfiles = []
         staticdir = path.join(outdir, '_static')
-        imagesdir = path.join(outdir, '_images')
+        imagesdir = path.join(outdir, self.imagedir)
         for root, dirs, files in os.walk(outdir):
             resourcedir = root.startswith(staticdir) or \
                 root.startswith(imagesdir)
@@ -5,7 +5,7 @@
 
     Texinfo builder.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -5,7 +5,7 @@
 
     Plain-text Sphinx builder.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -5,7 +5,7 @@
 
     Builder for the web support package.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -27,6 +27,7 @@ class WebSupportBuilder(PickleHTMLBuilder):
     """
     name = 'websupport'
     versioning_method = 'commentable'
+    versioning_compare = True  # for commentable node's uuid stability.
 
     def init(self):
         PickleHTMLBuilder.init(self)
@@ -58,7 +59,8 @@ class WebSupportBuilder(PickleHTMLBuilder):
         doctree.settings = self.docsettings
 
         self.secnumbers = self.env.toc_secnumbers.get(docname, {})
-        self.imgpath = '/' + posixpath.join(self.virtual_staticdir, '_images')
+        self.fignumbers = self.env.toc_fignumbers.get(docname, {})
+        self.imgpath = '/' + posixpath.join(self.virtual_staticdir, self.imagedir)
         self.dlpath = '/' + posixpath.join(self.virtual_staticdir, '_downloads')
         self.current_docname = docname
         self.docwriter.write(doctree, destination)
@@ -70,7 +72,7 @@ class WebSupportBuilder(PickleHTMLBuilder):
         self.handle_page(docname, ctx, event_arg=doctree)
 
     def write_doc_serialized(self, docname, doctree):
-        self.imgpath = '/' + posixpath.join(self.virtual_staticdir, '_images')
+        self.imgpath = '/' + posixpath.join(self.virtual_staticdir, self.imagedir)
         self.post_process_images(doctree)
         title = self.env.longtitles.get(docname)
         title = title and self.render_partial(title)['title'] or ''
@@ -103,8 +105,10 @@ class WebSupportBuilder(PickleHTMLBuilder):
         self.add_sidebars(pagename, ctx)
         ctx.update(addctx)
 
-        self.app.emit('html-page-context', pagename, templatename,
-                      ctx, event_arg)
+        newtmpl = self.app.emit_firstresult('html-page-context', pagename,
+                                            templatename, ctx, event_arg)
+        if newtmpl:
+            templatename = newtmpl
 
         # create a dict that will be pickled and used by webapps
         doc_ctx = {
@@ -148,7 +152,7 @@ class WebSupportBuilder(PickleHTMLBuilder):
         PickleHTMLBuilder.handle_finish(self)
 
         # move static stuff over to separate directory
-        directories = ['_images', '_static']
+        directories = [self.imagedir, '_static']
         for directory in directories:
             src = path.join(self.outdir, directory)
             dst = path.join(self.staticdir, directory)
@@ -5,7 +5,7 @@
 
     Docutils-native XML and pseudo-XML builders.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -5,14 +5,14 @@
 
     sphinx-build command-line handling.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 from __future__ import print_function
 
 import os
 import sys
-import getopt
+import optparse
 import traceback
 from os import path
 
@@ -32,89 +32,121 @@ def usage(argv, msg=None):
     if msg:
         print(msg, file=sys.stderr)
         print(file=sys.stderr)
-    print("""\
-Sphinx v%s
-Usage: %s [options] sourcedir outdir [filenames...]
-
-General options
-^^^^^^^^^^^^^^^
--b <builder>         builder to use; default is html
--a                   write all files; default is to only write new and changed files
--E                   don't use a saved environment, always read all files
--d <path>            path for the cached environment and doctree files
-                     (default: outdir/.doctrees)
--j <N>               build in parallel with N processes where possible
--M <builder>         "make" mode -- used by Makefile, like "sphinx-build -M html"
-
-Build configuration options
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
--c <path>            path where configuration file (conf.py) is located
-                     (default: same as sourcedir)
--C                   use no config file at all, only -D options
--D <setting=value>   override a setting in configuration file
--t <tag>             define tag: include "only" blocks with <tag>
--A <name=value>      pass a value into the templates, for HTML builder
--n                   nit-picky mode, warn about all missing references
-
-Console output options
-^^^^^^^^^^^^^^^^^^^^^^
--v                   increase verbosity (can be repeated)
--q                   no output on stdout, just warnings on stderr
--Q                   no output at all, not even warnings
--w <file>            write warnings (and errors) to given file
--W                   turn warnings into errors
--T                   show full traceback on exception
--N                   do not emit colored output
--P                   run Pdb on exception
-
-Filename arguments
-^^^^^^^^^^^^^^^^^^
-* without -a and without filenames, write new and changed files.
-* with -a, write all files.
-* with filenames, write these.
-
-Standard options
-^^^^^^^^^^^^^^^^
--h, --help           show this help and exit
---version            show version information and exit
-""" % (__version__, argv[0]), file=sys.stderr)
+
+
+USAGE = """\
+Sphinx v%s
+Usage: %%prog [options] sourcedir outdir [filenames...]
+
+Filename arguments:
+  without -a and without filenames, write new and changed files.
+  with -a, write all files.
+  with filenames, write these.
+""" % __version__
+
+EPILOG = """\
+For more information, visit <http://sphinx-doc.org/>.
+"""
+
+
+class MyFormatter(optparse.IndentedHelpFormatter):
+    def format_usage(self, usage):
+        return usage
+
+    def format_help(self, formatter):
+        result = []
+        if self.description:
+            result.append(self.format_description(formatter))
+        if self.option_list:
+            result.append(self.format_option_help(formatter))
+        return "\n".join(result)
 
 
 def main(argv):
     if not color_terminal():
         nocolor()
 
+    parser = optparse.OptionParser(USAGE, epilog=EPILOG, formatter=MyFormatter())
+    parser.add_option('--version', action='store_true', dest='version',
+                      help='show version information and exit')
+
+    group = parser.add_option_group('General options')
+    group.add_option('-b', metavar='BUILDER', dest='builder', default='html',
+                     help='builder to use; default is html')
+    group.add_option('-a', action='store_true', dest='force_all',
+                     help='write all files; default is to only write new and '
+                     'changed files')
+    group.add_option('-E', action='store_true', dest='freshenv',
+                     help='don\'t use a saved environment, always read '
+                     'all files')
+    group.add_option('-d', metavar='PATH', default=None, dest='doctreedir',
+                     help='path for the cached environment and doctree files '
+                     '(default: outdir/.doctrees)')
+    group.add_option('-j', metavar='N', default=1, type='int', dest='jobs',
+                     help='build in parallel with N processes where possible')
+    # this option never gets through to this point (it is intercepted earlier)
+    # group.add_option('-M', metavar='BUILDER', dest='make_mode',
+    #                  help='"make" mode -- as used by Makefile, like '
+    #                       '"sphinx-build -M html"')
+
+    group = parser.add_option_group('Build configuration options')
+    group.add_option('-c', metavar='PATH', dest='confdir',
+                     help='path where configuration file (conf.py) is located '
+                     '(default: same as sourcedir)')
+    group.add_option('-C', action='store_true', dest='noconfig',
+                     help='use no config file at all, only -D options')
+    group.add_option('-D', metavar='setting=value', action='append',
+                     dest='define', default=[],
+                     help='override a setting in configuration file')
+    group.add_option('-A', metavar='name=value', action='append',
+                     dest='htmldefine', default=[],
+                     help='pass a value into HTML templates')
+    group.add_option('-t', metavar='TAG', action='append',
+                     dest='tags', default=[],
+                     help='define tag: include "only" blocks with TAG')
+    group.add_option('-n', action='store_true', dest='nitpicky',
+                     help='nit-picky mode, warn about all missing references')
+
+    group = parser.add_option_group('Console output options')
+    group.add_option('-v', action='count', dest='verbosity', default=0,
+                     help='increase verbosity (can be repeated)')
+    group.add_option('-q', action='store_true', dest='quiet',
+                     help='no output on stdout, just warnings on stderr')
+    group.add_option('-Q', action='store_true', dest='really_quiet',
+                     help='no output at all, not even warnings')
+    group.add_option('-N', action='store_true', dest='nocolor',
+                     help='do not emit colored output')
+    group.add_option('-w', metavar='FILE', dest='warnfile',
+                     help='write warnings (and errors) to given file')
+    group.add_option('-W', action='store_true', dest='warningiserror',
+                     help='turn warnings into errors')
+    group.add_option('-T', action='store_true', dest='traceback',
+                     help='show full traceback on exception')
+    group.add_option('-P', action='store_true', dest='pdb',
+                     help='run Pdb on exception')
 
     # parse options
     try:
-        opts, args = getopt.getopt(argv[1:], 'ab:t:d:c:CD:A:nNEqQWw:PThvj:',
-                                   ['help', 'version'])
-    except getopt.error as err:
-        usage(argv, 'Error: %s' % err)
-        return 1
+        opts, args = parser.parse_args(argv[1:])
+    except SystemExit as err:
+        return err.code
 
     # handle basic options
-    allopts = set(opt[0] for opt in opts)
-    # help and version options
-    if '-h' in allopts or '--help' in allopts:
-        usage(argv)
-        print(file=sys.stderr)
-        print('For more information, see <http://sphinx-doc.org/>.',
-              file=sys.stderr)
-        return 0
-    if '--version' in allopts:
-        print('Sphinx (sphinx-build) %s' % __version__)
+    if opts.version:
+        print('Sphinx (sphinx-build) %s' % __version__)
         return 0
 
     # get paths (first and second positional argument)
     try:
-        srcdir = confdir = abspath(args[0])
+        srcdir = abspath(args[0])
+        confdir = abspath(opts.confdir or srcdir)
+        if opts.noconfig:
+            confdir = None
        if not path.isdir(srcdir):
            print('Error: Cannot find source directory `%s\'.' % srcdir,
                  file=sys.stderr)
            return 1
-        if not path.isfile(path.join(srcdir, 'conf.py')) and \
-           '-c' not in allopts and '-C' not in allopts:
-            print('Error: Source directory doesn\'t contain a conf.py file.',
+        if not opts.noconfig and not path.isfile(path.join(confdir, 'conf.py')):
+            print('Error: Config directory doesn\'t contain a conf.py file.',
                  file=sys.stderr)
            return 1
        outdir = abspath(args[1])
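With getopt replaced by optparse above, parsed options arrive as attributes (opts.builder, opts.jobs, and so on) instead of an (option, value) list. A stand-alone sketch of the same pattern, trimmed to two of the options defined in the hunk::

    import optparse

    parser = optparse.OptionParser('usage: %prog [options] sourcedir outdir')
    group = parser.add_option_group('General options')
    group.add_option('-b', metavar='BUILDER', dest='builder', default='html',
                     help='builder to use; default is html')
    group.add_option('-j', metavar='N', type='int', dest='jobs', default=1,
                     help='build in parallel with N processes where possible')

    # parse_args returns an options object plus the positional arguments
    opts, args = parser.parse_args(['-b', 'latex', '-j', '4', 'doc', 'build'])
    print(opts.builder, opts.jobs, args)   # latex 4 ['doc', 'build']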
@@ -144,116 +176,77 @@ def main(argv):
     except Exception:
         likely_encoding = None
 
-    buildername = None
-    force_all = freshenv = warningiserror = use_pdb = False
-    show_traceback = False
-    verbosity = 0
-    parallel = 0
+    if opts.force_all and filenames:
+        print('Error: Cannot combine -a option and filenames.', file=sys.stderr)
+        return 1
+
+    if opts.nocolor:
+        nocolor()
+
+    doctreedir = abspath(opts.doctreedir or path.join(outdir, '.doctrees'))
+
     status = sys.stdout
     warning = sys.stderr
     error = sys.stderr
-    warnfile = None
+
+    if opts.quiet:
+        status = None
+    if opts.really_quiet:
+        status = warning = None
+    if warning and opts.warnfile:
+        try:
+            warnfp = open(opts.warnfile, 'w')
+        except Exception as exc:
+            print('Error: Cannot open warning file %r: %s' %
+                  (opts.warnfile, exc), file=sys.stderr)
+            sys.exit(1)
+        warning = Tee(warning, warnfp)
+        error = warning
 
     confoverrides = {}
-    tags = []
-    doctreedir = path.join(outdir, '.doctrees')
-    for opt, val in opts:
-        if opt == '-b':
-            buildername = val
-        elif opt == '-a':
-            if filenames:
-                usage(argv, 'Error: Cannot combine -a option and filenames.')
-                return 1
-            force_all = True
-        elif opt == '-t':
-            tags.append(val)
-        elif opt == '-d':
-            doctreedir = abspath(val)
-        elif opt == '-c':
-            confdir = abspath(val)
-            if not path.isfile(path.join(confdir, 'conf.py')):
-                print('Error: Configuration directory doesn\'t contain conf.py file.',
-                      file=sys.stderr)
-                return 1
-        elif opt == '-C':
-            confdir = None
-        elif opt == '-D':
-            try:
-                key, val = val.split('=')
-            except ValueError:
-                print('Error: -D option argument must be in the form name=value.',
-                      file=sys.stderr)
-                return 1
-            if likely_encoding and isinstance(val, binary_type):
-                try:
-                    val = val.decode(likely_encoding)
-                except UnicodeError:
-                    pass
-            confoverrides[key] = val
-        elif opt == '-A':
-            try:
-                key, val = val.split('=')
-            except ValueError:
-                print('Error: -A option argument must be in the form name=value.',
-                      file=sys.stderr)
-                return 1
-            try:
-                val = int(val)
-            except ValueError:
-                if likely_encoding and isinstance(val, binary_type):
-                    try:
-                        val = val.decode(likely_encoding)
-                    except UnicodeError:
-                        pass
-            confoverrides['html_context.%s' % key] = val
-        elif opt == '-n':
-            confoverrides['nitpicky'] = True
-        elif opt == '-N':
-            nocolor()
-        elif opt == '-E':
-            freshenv = True
-        elif opt == '-q':
-            status = None
-        elif opt == '-Q':
-            status = None
-            warning = None
-        elif opt == '-W':
-            warningiserror = True
-        elif opt == '-w':
-            warnfile = val
-        elif opt == '-P':
-            use_pdb = True
-        elif opt == '-T':
-            show_traceback = True
-        elif opt == '-v':
-            verbosity += 1
-            show_traceback = True
-        elif opt == '-j':
-            try:
-                parallel = int(val)
-            except ValueError:
-                print('Error: -j option argument must be an integer.',
-                      file=sys.stderr)
-                return 1
-
-    if warning and warnfile:
-        warnfp = open(warnfile, 'w')
-        warning = Tee(warning, warnfp)
-        error = warning
-
-    if not path.isdir(outdir):
-        if status:
-            print('Making output directory...', file=status)
-        os.makedirs(outdir)
+    for val in opts.define:
+        try:
+            key, val = val.split('=')
+        except ValueError:
+            print('Error: -D option argument must be in the form name=value.',
+                  file=sys.stderr)
+            return 1
+        if likely_encoding and isinstance(val, binary_type):
+            try:
+                val = val.decode(likely_encoding)
+            except UnicodeError:
+                pass
+        confoverrides[key] = val
+
+    for val in opts.htmldefine:
+        try:
+            key, val = val.split('=')
+        except ValueError:
+            print('Error: -A option argument must be in the form name=value.',
+                  file=sys.stderr)
+            return 1
+        try:
+            val = int(val)
+        except ValueError:
+            if likely_encoding and isinstance(val, binary_type):
+                try:
+                    val = val.decode(likely_encoding)
+                except UnicodeError:
+                    pass
+        confoverrides['html_context.%s' % key] = val
+
+    if opts.nitpicky:
+        confoverrides['nitpicky'] = True
 
     app = None
     try:
-        app = Sphinx(srcdir, confdir, outdir, doctreedir, buildername,
-                     confoverrides, status, warning, freshenv,
-                     warningiserror, tags, verbosity, parallel)
-        app.build(force_all, filenames)
+        app = Sphinx(srcdir, confdir, outdir, doctreedir, opts.builder,
+                     confoverrides, status, warning, opts.freshenv,
+                     opts.warningiserror, opts.tags, opts.verbosity, opts.jobs)
+        app.build(opts.force_all, filenames)
         return app.statuscode
     except (Exception, KeyboardInterrupt) as err:
-        if use_pdb:
+        if opts.pdb:
             import pdb
             print(red('Exception occurred while building, starting debugger:'),
                   file=error)
@@ -261,7 +254,7 @@ def main(argv):
             pdb.post_mortem(sys.exc_info()[2])
         else:
             print(file=error)
-            if show_traceback:
+            if opts.verbosity or opts.traceback:
                 traceback.print_exc(None, error)
                 print(file=error)
             if isinstance(err, KeyboardInterrupt):
@@ -290,6 +283,6 @@ def main(argv):
                   'that a better error message can be provided next time.',
                   file=error)
             print('A bug report can be filed in the tracker at '
-                  '<https://bitbucket.org/birkenfeld/sphinx/issues/>. Thanks!',
+                  '<https://github.com/sphinx-doc/sphinx/issues>. Thanks!',
                   file=error)
             return 1
@@ -5,11 +5,10 @@
 
     Build configuration file handling.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-import os
 import re
 from os import path
 
@@ -17,7 +16,7 @@ from six import PY3, iteritems, string_types, binary_type, integer_types
 
 from sphinx.errors import ConfigError
 from sphinx.locale import l_
-from sphinx.util.osutil import make_filename
+from sphinx.util.osutil import make_filename, cd
 from sphinx.util.pycompat import execfile_
 
 nonascii_re = re.compile(br'[\x80-\xff]')
@@ -25,6 +24,9 @@ nonascii_re = re.compile(br'[\x80-\xff]')
 CONFIG_SYNTAX_ERROR = "There is a syntax error in your configuration file: %s"
 if PY3:
     CONFIG_SYNTAX_ERROR += "\nDid you change the syntax from 2.x to 3.x?"
+CONFIG_EXIT_ERROR = "The configuration file (or one of the modules it imports) " \
+                    "called sys.exit()"
+
 
 class Config(object):
     """
@@ -59,6 +61,7 @@ class Config(object):
         show_authors = (False, 'env'),
         pygments_style = (None, 'html'),
         highlight_language = ('python', 'env'),
+        highlight_options = ({}, 'env'),
         templates_path = ([], 'html'),
         template_bridge = (None, 'html'),
         keep_warnings = (False, 'env'),
@@ -71,6 +74,12 @@ class Config(object):
         needs_extensions = ({}, None),
         nitpicky = (False, 'env'),
         nitpick_ignore = ([], 'html'),
+        numfig = (False, 'env'),
+        numfig_secnum_depth = (1, 'env'),
+        numfig_format = ({'figure': l_('Fig. %s'),
+                          'table': l_('Table %s'),
+                          'code-block': l_('Listing %s')},
+                         'env'),
 
         # HTML options
         html_theme = ('default', 'html'),
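The new numfig* config values above drive numbering of figures, tables and code-blocks. A conf.py sketch that simply spells out the defaults from the hunk; project-specific values would go in their place::

    # conf.py -- numbered figures, tables and code-blocks (new in this changeset)
    numfig = True                 # default is False
    numfig_secnum_depth = 1       # number as x.1, x.2, ... below section number x
    numfig_format = {
        'figure': 'Fig. %s',
        'table': 'Table %s',
        'code-block': 'Listing %s',
    }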
@@ -204,8 +213,9 @@ class Config(object):
         # gettext options
         gettext_compact = (True, 'gettext'),
         gettext_location = (True, 'gettext'),
-        gettext_uuid = (True, 'gettext'),
+        gettext_uuid = (False, 'gettext'),
         gettext_auto_build = (True, 'env'),
+        gettext_enables = ([], 'env'),
 
         # XML options
         xml_pretty = (True, 'env'),
@@ -215,7 +225,7 @@ class Config(object):
         self.overrides = overrides
         self.values = Config.config_values.copy()
         config = {}
-        if 'extensions' in overrides:
+        if 'extensions' in overrides:  #XXX do we need this?
             if isinstance(overrides['extensions'], string_types):
                 config['extensions'] = overrides.pop('extensions').split(',')
             else:
@@ -224,17 +234,15 @@ class Config(object):
         config_file = path.join(dirname, filename)
         config['__file__'] = config_file
         config['tags'] = tags
-        olddir = os.getcwd()
-        try:
+        with cd(dirname):
             # we promise to have the config dir as current dir while the
             # config file is executed
-            os.chdir(dirname)
             try:
                 execfile_(filename, config)
             except SyntaxError as err:
                 raise ConfigError(CONFIG_SYNTAX_ERROR % err)
-            finally:
-                os.chdir(olddir)
+            except SystemExit:
+                raise ConfigError(CONFIG_EXIT_ERROR)
 
         self._raw_config = config
         # these two must be preinitialized because extensions can add their
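The config loader now executes conf.py inside `with cd(dirname)` instead of doing chdir/try/finally bookkeeping by hand. `cd` is imported from sphinx.util.osutil; assuming it is a plain change-directory-and-restore context manager, a minimal equivalent looks like this::

    import os
    import tempfile
    from contextlib import contextmanager

    @contextmanager
    def cd(target_dir):
        # Enter target_dir for the duration of the block, then restore the
        # previous working directory even if the block raises.
        cwd = os.getcwd()
        try:
            os.chdir(target_dir)
            yield
        finally:
            os.chdir(cwd)

    with cd(tempfile.gettempdir()):
        print(os.getcwd())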
@@ -242,6 +250,30 @@ class Config(object):
         self.setup = config.get('setup', None)
         self.extensions = config.get('extensions', [])
 
+    def check_types(self, warn):
+        # check all values for deviation from the default value's type, since
+        # that can result in TypeErrors all over the place
+        # NB. since config values might use l_() we have to wait with calling
+        # this method until i18n is initialized
+        for name in self._raw_config:
+            if name not in Config.config_values:
+                continue  # we don't know a default value
+            default, dummy_rebuild = Config.config_values[name]
+            if hasattr(default, '__call__'):
+                default = default(self)  # could invoke l_()
+            if default is None:
+                continue
+            current = self[name]
+            if type(current) is type(default):
+                continue
+            common_bases = (set(type(current).__bases__ + (type(current),))
+                            & set(type(default).__bases__))
+            common_bases.discard(object)
+            if common_bases:
+                continue  # at least we share a non-trivial base class
+            warn("the config value %r has type `%s', defaults to `%s.'"
+                 % (name, type(current).__name__, type(default).__name__))
+
     def check_unicode(self, warn):
         # check all string values for non-ASCII characters in bytestrings,
         # since that can result in UnicodeErrors all over the place
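check_types() above warns when a configured value's type deviates from the default's type, unless the two types share a non-trivial base class. The same comparison restated outside the Config class; the helper name is illustrative::

    def same_enough(current, default):
        # Identical types are fine, and so is sharing any base class other
        # than object, exactly as in check_types() above.
        if type(current) is type(default):
            return True
        common = (set(type(current).__bases__ + (type(current),))
                  & set(type(default).__bases__))
        common.discard(object)
        return bool(common)

    print(same_enough(1, False))   # True  -- int where the default is a bool passes
    print(same_enough('3', 0))     # False -- str where the default is int warns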
@@ -277,9 +309,11 @@ class Config(object):
                 except ValueError:
                     warn('invalid number %r for config value %r, ignoring'
                          % (value, valname))
+            elif hasattr(defvalue, '__call__'):
+                config[valname] = value
             elif defvalue is not None and not isinstance(defvalue, string_types):
-                warn('cannot override config setting %r with unsupported type, '
-                     'ignoring' % valname)
+                warn('cannot override config setting %r with unsupported '
+                     'type, ignoring' % valname)
             else:
                 config[valname] = value
         else:
@@ -287,7 +321,6 @@ class Config(object):
         for name in config:
             if name in self.values:
                 self.__dict__[name] = config[name]
-        del self._raw_config
 
     def __getattr__(self, name):
         if name.startswith('_'):
@@ -5,13 +5,14 @@
 
     Handlers for additional ReST directives.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
 import re
 
-from docutils.parsers.rst import Directive, directives
+from docutils import nodes
+from docutils.parsers.rst import Directive, directives, roles
 
 from sphinx import addnodes
 from sphinx.util.docfields import DocFieldTransformer
@@ -162,6 +163,34 @@ class ObjectDescription(Directive):
 DescDirective = ObjectDescription
 
 
+class DefaultRole(Directive):
+    """
+    Set the default interpreted text role.  Overridden from docutils.
+    """
+
+    optional_arguments = 1
+    final_argument_whitespace = False
+
+    def run(self):
+        if not self.arguments:
+            if '' in roles._roles:
+                # restore the "default" default role
+                del roles._roles['']
+            return []
+        role_name = self.arguments[0]
+        role, messages = roles.role(role_name, self.state_machine.language,
+                                    self.lineno, self.state.reporter)
+        if role is None:
+            error = self.state.reporter.error(
+                'Unknown interpreted text role "%s".' % role_name,
+                nodes.literal_block(self.block_text, self.block_text),
+                line=self.lineno)
+            return messages + [error]
+        roles._roles[''] = role
+        self.state.document.settings.env.temp_data['default_role'] = role_name
+        return messages
+
+
 class DefaultDomain(Directive):
     """
     Directive to (re-)set the default domain for this source file.
@@ -186,6 +215,7 @@ class DefaultDomain(Directive):
         return []
 
 
+directives.register_directive('default-role', DefaultRole)
 directives.register_directive('default-domain', DefaultDomain)
 directives.register_directive('describe', ObjectDescription)
 # new, more consistent, name
@@ -3,7 +3,7 @@
     sphinx.directives.code
     ~~~~~~~~~~~~~~~~~~~~~~
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@@ -13,6 +13,7 @@ from difflib import unified_diff
 
 from docutils import nodes
 from docutils.parsers.rst import Directive, directives
+from docutils.statemachine import ViewList
 
 from six import string_types
 
@@ -47,7 +48,6 @@ class Highlight(Directive):
                 linenothreshold=linenothreshold)]
 
 
-
 def dedent_lines(lines, dedent):
     if not dedent:
         return lines
@@ -62,6 +62,20 @@ def dedent_lines(lines, dedent):
     return new_lines
 
 
+def container_wrapper(directive, literal_node, caption):
+    container_node = nodes.container('', literal_block=True)
+    parsed = nodes.Element()
+    directive.state.nested_parse(ViewList([caption], source=''),
+                                 directive.content_offset, parsed)
+    caption_node = nodes.caption(parsed[0].rawsource, '',
+                                 *parsed[0].children)
+    caption_node.source = parsed[0].source
+    caption_node.line = parsed[0].line
+    container_node += caption_node
+    container_node += literal_node
+    return container_node
+
+
 class CodeBlock(Directive):
     """
     Directive for a code block with special highlighting or line numbering
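container_wrapper() above packs a literal_block together with a parsed caption into a container node, which is what later lets code-block and literalinclude captions be numbered. A docutils-only sketch of the node shape it produces, with an arbitrary caption text::

    from docutils import nodes

    literal = nodes.literal_block('print(1)', 'print(1)')
    literal['language'] = 'python'

    # Shape produced by container_wrapper(): container(caption, literal_block)
    container = nodes.container('', literal_block=True)
    container += nodes.caption('', 'Listing caption')
    container += literal

    print(container.pformat())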
@@ -101,9 +115,6 @@ class CodeBlock(Directive):
 
         literal = nodes.literal_block(code, code)
         literal['language'] = self.arguments[0]
-        caption = self.options.get('caption')
-        if caption:
-            literal['caption'] = caption
         literal['linenos'] = 'linenos' in self.options or \
             'lineno-start' in self.options
         extra_args = literal['highlight_args'] = {}
@@ -112,6 +123,11 @@ class CodeBlock(Directive):
         if 'lineno-start' in self.options:
             extra_args['linenostart'] = self.options['lineno-start']
         set_source_info(self, literal)
+
+        caption = self.options.get('caption')
+        if caption:
+            literal = container_wrapper(self, literal, caption)
+
         return [literal]
 
 
@@ -130,6 +146,7 @@ class LiteralInclude(Directive):
         'dedent': int,
         'linenos': directives.flag,
         'lineno-start': int,
+        'lineno-match': directives.flag,
         'tab-width': int,
         'language': directives.unchanged_required,
         'encoding': directives.encoding,
@@ -147,8 +164,8 @@ class LiteralInclude(Directive):
     def read_with_encoding(self, filename, document, codec_info, encoding):
         f = None
         try:
-            f = codecs.StreamReaderWriter(open(filename, 'rb'),
-                                          codec_info[2], codec_info[3], 'strict')
+            f = codecs.StreamReaderWriter(open(filename, 'rb'), codec_info[2],
+                                          codec_info[3], 'strict')
             lines = f.readlines()
             lines = dedent_lines(lines, self.options.get('dedent'))
             return lines
@@ -178,6 +195,17 @@ class LiteralInclude(Directive):
                 'Cannot use both "pyobject" and "lines" options',
                 line=self.lineno)]
 
+        if 'lineno-match' in self.options and 'lineno-start' in self.options:
+            return [document.reporter.warning(
+                'Cannot use both "lineno-match" and "lineno-start"',
+                line=self.lineno)]
+
+        if 'lineno-match' in self.options and \
+                (set(['append', 'prepend']) & set(self.options.keys())):
+            return [document.reporter.warning(
+                'Cannot use "lineno-match" and "append" or "prepend"',
+                line=self.lineno)]
+
         encoding = self.options.get('encoding', env.config.source_encoding)
         codec_info = codecs.lookup(encoding)
 
@@ -191,7 +219,7 @@ class LiteralInclude(Directive):
             tmp, fulldiffsource = env.relfn2path(diffsource)
 
             difflines = self.read_with_encoding(fulldiffsource, document,
                                                 codec_info, encoding)
             if not isinstance(difflines[0], string_types):
                 return difflines
             diff = unified_diff(
@@ -201,6 +229,7 @@ class LiteralInclude(Directive):
                 self.arguments[0])
             lines = list(diff)
 
+        linenostart = self.options.get('lineno-start', 1)
         objectname = self.options.get('pyobject')
         if objectname is not None:
             from sphinx.pycode import ModuleAnalyzer
@@ -211,17 +240,30 @@ class LiteralInclude(Directive):
                     'Object named %r not found in include file %r' %
                     (objectname, filename), line=self.lineno)]
             else:
-                lines = lines[tags[objectname][1]-1 : tags[objectname][2]-1]
+                lines = lines[tags[objectname][1]-1: tags[objectname][2]-1]
+                if 'lineno-match' in self.options:
+                    linenostart = tags[objectname][1]
 
         linespec = self.options.get('lines')
-        if linespec is not None:
+        if linespec:
             try:
                 linelist = parselinenos(linespec, len(lines))
             except ValueError as err:
                 return [document.reporter.warning(str(err), line=self.lineno)]
-            # just ignore nonexisting lines
-            nlines = len(lines)
-            lines = [lines[i] for i in linelist if i < nlines]
+
+            if 'lineno-match' in self.options:
+                # make sure the line list is not "disjoint".
+                previous = linelist[0]
+                for line_number in linelist[1:]:
+                    if line_number == previous + 1:
+                        previous = line_number
+                        continue
+                    return [document.reporter.warning(
+                        'Cannot use "lineno-match" with a disjoint set of '
+                        '"lines"', line=self.lineno)]
+                linenostart = linelist[0] + 1
+            # just ignore non-existing lines
+            lines = [lines[i] for i in linelist if i < len(lines)]
             if not lines:
                 return [document.reporter.warning(
                     'Line spec %r: no lines pulled from include file %r' %
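The lineno-match handling above rejects a disjoint :lines: selection, since the emitted line numbers could no longer match the source file. The same contiguity walk as a stand-alone helper; the function name is illustrative::

    def is_contiguous(linelist):
        # linelist holds 0-based line indexes as produced by parselinenos();
        # lineno-match only makes sense when they form one unbroken run.
        previous = linelist[0]
        for line_number in linelist[1:]:
            if line_number != previous + 1:
                return False
            previous = line_number
        return True

    print(is_contiguous([4, 5, 6]))   # True  -> linenostart becomes 4 + 1 = 5
    print(is_contiguous([4, 6, 7]))   # False -> the directive reports a warning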
@@ -237,54 +279,68 @@ class LiteralInclude(Directive):
             hl_lines = None
 
         startafter = self.options.get('start-after')
         endbefore = self.options.get('end-before')
-        prepend = self.options.get('prepend')
-        append = self.options.get('append')
         if startafter is not None or endbefore is not None:
             use = not startafter
             res = []
-            for line in lines:
+            for line_number, line in enumerate(lines):
                 if not use and startafter and startafter in line:
+                    if 'lineno-match' in self.options:
+                        linenostart += line_number + 1
                     use = True
                 elif use and endbefore and endbefore in line:
-                    use = False
                     break
                 elif use:
                     res.append(line)
             lines = res
 
+        if 'lineno-match' in self.options:
+            # handle that docutils remove preceding lines which only contains
+            # line separation.
+            for line in lines:
+                # check if line contains anything else than line separation.
+                if line and line.splitlines()[0]:
+                    break
+                linenostart += 1
+
+        prepend = self.options.get('prepend')
         if prepend:
             lines.insert(0, prepend + '\n')
+
+        append = self.options.get('append')
         if append:
             lines.append(append + '\n')
 
         text = ''.join(lines)
         if self.options.get('tab-width'):
             text = text.expandtabs(self.options['tab-width'])
         retnode = nodes.literal_block(text, text, source=filename)
         set_source_info(self, retnode)
-        if diffsource is not None:  # if diff is set, set udiff
+        if diffsource:  # if diff is set, set udiff
             retnode['language'] = 'udiff'
-        if self.options.get('language', ''):
+        if 'language' in self.options:
             retnode['language'] = self.options['language']
         retnode['linenos'] = 'linenos' in self.options or \
-            'lineno-start' in self.options
-        caption = self.options.get('caption')
-        if caption is not None:
-            if not caption:
-                caption = self.arguments[0]
-            retnode['caption'] = caption
+            'lineno-start' in self.options or \
+            'lineno-match' in self.options
         extra_args = retnode['highlight_args'] = {}
         if hl_lines is not None:
             extra_args['hl_lines'] = hl_lines
-        if 'lineno-start' in self.options:
-            extra_args['linenostart'] = self.options['lineno-start']
+        extra_args['linenostart'] = linenostart
         env.note_dependency(rel_filename)
+
+        caption = self.options.get('caption')
+        if caption is not None:
+            if caption:
+                retnode = container_wrapper(self, retnode, caption)
+            else:
+                retnode = container_wrapper(self, retnode, self.arguments[0])
+
         return [retnode]
 
 
 directives.register_directive('highlight', Highlight)
 directives.register_directive('highlightlang', Highlight)  # old
 directives.register_directive('code-block', CodeBlock)
 directives.register_directive('sourcecode', CodeBlock)
 directives.register_directive('literalinclude', LiteralInclude)
@ -3,7 +3,7 @@
|
|||||||
sphinx.directives.other
|
sphinx.directives.other
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
|
:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
|
||||||
:license: BSD, see LICENSE for details.
|
:license: BSD, see LICENSE for details.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -62,7 +62,18 @@ class TocTree(Directive):
|
|||||||
for entry in self.content:
|
for entry in self.content:
|
||||||
if not entry:
|
if not entry:
|
||||||
continue
|
continue
|
||||||
if not glob:
|
if glob and ('*' in entry or '?' in entry or '[' in entry):
|
||||||
|
patname = docname_join(env.docname, entry)
|
||||||
|
docnames = sorted(patfilter(all_docnames, patname))
|
||||||
|
for docname in docnames:
|
||||||
|
all_docnames.remove(docname) # don't include it again
|
||||||
|
entries.append((None, docname))
|
||||||
|
includefiles.append(docname)
|
||||||
|
if not docnames:
|
||||||
|
ret.append(self.state.document.reporter.warning(
|
||||||
|
'toctree glob pattern %r didn\'t match any documents'
|
||||||
|
% entry, line=self.lineno))
|
||||||
|
else:
|
||||||
# look for explicit titles ("Some Title <document>")
|
# look for explicit titles ("Some Title <document>")
|
||||||
m = explicit_title_re.match(entry)
|
m = explicit_title_re.match(entry)
|
||||||
if m:
|
if m:
|
||||||
@ -85,19 +96,9 @@ class TocTree(Directive):
|
|||||||
'document %r' % docname, line=self.lineno))
|
'document %r' % docname, line=self.lineno))
|
||||||
env.note_reread()
|
env.note_reread()
|
||||||
else:
|
else:
|
||||||
|
all_docnames.discard(docname)
|
||||||
entries.append((title, docname))
|
entries.append((title, docname))
|
||||||
includefiles.append(docname)
|
includefiles.append(docname)
|
||||||
else:
|
|
||||||
patname = docname_join(env.docname, entry)
|
|
||||||
docnames = sorted(patfilter(all_docnames, patname))
|
|
||||||
for docname in docnames:
|
|
||||||
all_docnames.remove(docname) # don't include it again
|
|
||||||
entries.append((None, docname))
|
|
||||||
includefiles.append(docname)
|
|
||||||
if not docnames:
|
|
||||||
ret.append(self.state.document.reporter.warning(
|
|
||||||
'toctree glob pattern %r didn\'t match any documents'
|
|
||||||
% entry, line=self.lineno))
|
|
||||||
subnode = addnodes.toctree()
|
subnode = addnodes.toctree()
|
||||||
subnode['parent'] = env.docname
|
subnode['parent'] = env.docname
|
||||||
# entries contains all entries (self references, external links etc.)
|
# entries contains all entries (self references, external links etc.)
|
||||||
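
The new glob branch above expands a toctree entry such as 'api/*' against every known document exactly once. patfilter in sphinx.util behaves much like fnmatch over docnames; the standalone sketch below approximates the same matching and de-duplication logic (expand_toctree_glob and its argument names are invented for illustration):

    import fnmatch
    import posixpath

    def expand_toctree_glob(entry, current_docname, all_docnames):
        # Resolve the pattern relative to the referring document, like docname_join does.
        patname = posixpath.normpath(
            posixpath.join(posixpath.dirname(current_docname), entry))
        matched = sorted(d for d in all_docnames if fnmatch.fnmatch(d, patname))
        for docname in matched:
            all_docnames.discard(docname)   # don't include a document twice
        return matched

    # e.g. expand_toctree_glob('api/*', 'index', {'api/core', 'api/util', 'intro'})
    # returns ['api/core', 'api/util']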
@@ -6,7 +6,7 @@
     Support for domains, which are groupings of description directives
     and roles describing e.g. constructs of one programming language.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -155,10 +155,13 @@ class Domain(object):
         self._role_cache = {}
         self._directive_cache = {}
         self._role2type = {}
+        self._type2role = {}
         for name, obj in iteritems(self.object_types):
             for rolename in obj.roles:
                 self._role2type.setdefault(rolename, []).append(name)
+            self._type2role[name] = obj.roles[0] if obj.roles else ''
         self.objtypes_for_role = self._role2type.get
+        self.role_for_objtype = self._type2role.get

     def role(self, name):
         """Return a role adapter function that always gives the registered
@@ -199,6 +202,14 @@ class Domain(object):
         """Remove traces of a document in the domain-specific inventories."""
         pass

+    def merge_domaindata(self, docnames, otherdata):
+        """Merge in data regarding *docnames* from a different domaindata
+        inventory (coming from a subprocess in parallel builds).
+        """
+        raise NotImplementedError('merge_domaindata must be implemented in %s '
+                                  'to be able to do parallel builds!' %
+                                  self.__class__)
+
     def process_doc(self, env, docname, document):
         """Process a document after it is read by the environment."""
         pass
@@ -220,6 +231,22 @@ class Domain(object):
         """
         pass

+    def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
+        """Resolve the pending_xref *node* with the given *target*.
+
+        The reference comes from an "any" or similar role, which means that we
+        don't know the type.  Otherwise, the arguments are the same as for
+        :meth:`resolve_xref`.
+
+        The method must return a list (potentially empty) of tuples
+        ``('domain:role', newnode)``, where ``'domain:role'`` is the name of a
+        role that could have created the same reference, e.g. ``'py:func'``.
+        ``newnode`` is what :meth:`resolve_xref` would return.
+
+        .. versionadded:: 1.3
+        """
+        raise NotImplementedError
+
     def get_objects(self):
         """Return an iterable of "object descriptions", which are tuples with
         five items:
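
For extension authors, the contract spelled out in these two docstrings is small. A hedged sketch of a minimal third-party domain that satisfies it; the 'recipe' domain, its data layout and the 'recipe:ref' role name are invented for illustration:

    from sphinx.domains import Domain
    from sphinx.util.nodes import make_refnode

    class RecipeDomain(Domain):
        name = 'recipe'
        initial_data = {'objects': {}}   # fullname -> (docname, labelid)

        def merge_domaindata(self, docnames, otherdata):
            # Adopt only the entries that belong to documents read by the other worker.
            for fullname, (fn, labelid) in otherdata['objects'].items():
                if fn in docnames:
                    self.data['objects'][fullname] = (fn, labelid)

        def resolve_any_xref(self, env, fromdocname, builder, target,
                             node, contnode):
            if target not in self.data['objects']:
                return []        # an empty list means "no candidate", not an error
            docname, labelid = self.data['objects'][target]
            return [('recipe:ref',
                     make_refnode(builder, fromdocname, docname, labelid,
                                  contnode, target))]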
@@ -5,7 +5,7 @@

     The C language domain.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -130,7 +130,7 @@ class CObject(ObjectDescription):
         if m:
             name = m.group(1)

-            typename = self.env.temp_data.get('c:type')
+            typename = self.env.ref_context.get('c:type')
             if self.name == 'c:member' and typename:
                 fullname = typename + '.' + name
             else:
@@ -212,12 +212,12 @@ class CObject(ObjectDescription):
         self.typename_set = False
         if self.name == 'c:type':
             if self.names:
-                self.env.temp_data['c:type'] = self.names[0]
+                self.env.ref_context['c:type'] = self.names[0]
                 self.typename_set = True

     def after_content(self):
         if self.typename_set:
-            self.env.temp_data['c:type'] = None
+            self.env.ref_context.pop('c:type', None)


 class CXRefRole(XRefRole):
@@ -269,6 +269,12 @@ class CDomain(Domain):
             if fn == docname:
                 del self.data['objects'][fullname]

+    def merge_domaindata(self, docnames, otherdata):
+        # XXX check duplicates
+        for fullname, (fn, objtype) in otherdata['objects'].items():
+            if fn in docnames:
+                self.data['objects'][fullname] = (fn, objtype)
+
     def resolve_xref(self, env, fromdocname, builder,
                      typ, target, node, contnode):
         # strip pointer asterisk
@@ -279,6 +285,17 @@ class CDomain(Domain):
         return make_refnode(builder, fromdocname, obj[0], 'c.' + target,
                             contnode, target)

+    def resolve_any_xref(self, env, fromdocname, builder, target,
+                         node, contnode):
+        # strip pointer asterisk
+        target = target.rstrip(' *')
+        if target not in self.data['objects']:
+            return []
+        obj = self.data['objects'][target]
+        return [('c:' + self.role_for_objtype(obj[1]),
+                 make_refnode(builder, fromdocname, obj[0], 'c.' + target,
+                              contnode, target))]
+
     def get_objects(self):
         for refname, (docname, type) in list(self.data['objects'].items()):
             yield (refname, refname, type, docname, 'c.' + refname, 1)

[File diff suppressed because it is too large]
@@ -5,7 +5,7 @@

     The JavaScript domain.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -45,7 +45,7 @@ class JSObject(ObjectDescription):
             nameprefix = None
             name = prefix

-        objectname = self.env.temp_data.get('js:object')
+        objectname = self.env.ref_context.get('js:object')
         if nameprefix:
             if objectname:
                 # someone documenting the method of an attribute of the current
@@ -77,7 +77,7 @@ class JSObject(ObjectDescription):

     def add_target_and_index(self, name_obj, sig, signode):
         objectname = self.options.get(
-            'object', self.env.temp_data.get('js:object'))
+            'object', self.env.ref_context.get('js:object'))
         fullname = name_obj[0]
         if fullname not in self.state.document.ids:
             signode['names'].append(fullname)
@@ -140,7 +140,7 @@ class JSConstructor(JSCallable):
 class JSXRefRole(XRefRole):
     def process_link(self, env, refnode, has_explicit_title, title, target):
         # basically what sphinx.domains.python.PyXRefRole does
-        refnode['js:object'] = env.temp_data.get('js:object')
+        refnode['js:object'] = env.ref_context.get('js:object')
         if not has_explicit_title:
             title = title.lstrip('.')
             target = target.lstrip('~')
@@ -179,7 +179,7 @@ class JavaScriptDomain(Domain):
         'attr': JSXRefRole(),
     }
     initial_data = {
         'objects': {},  # fullname -> docname, objtype
     }

     def clear_doc(self, docname):
@@ -187,6 +187,12 @@ class JavaScriptDomain(Domain):
             if fn == docname:
                 del self.data['objects'][fullname]

+    def merge_domaindata(self, docnames, otherdata):
+        # XXX check duplicates
+        for fullname, (fn, objtype) in otherdata['objects'].items():
+            if fn in docnames:
+                self.data['objects'][fullname] = (fn, objtype)
+
     def find_obj(self, env, obj, name, typ, searchorder=0):
         if name[-2:] == '()':
             name = name[:-2]
@@ -214,6 +220,16 @@ class JavaScriptDomain(Domain):
         return make_refnode(builder, fromdocname, obj[0],
                             name.replace('$', '_S_'), contnode, name)

+    def resolve_any_xref(self, env, fromdocname, builder, target, node,
+                         contnode):
+        objectname = node.get('js:object')
+        name, obj = self.find_obj(env, objectname, target, None, 1)
+        if not obj:
+            return []
+        return [('js:' + self.role_for_objtype(obj[1]),
+                 make_refnode(builder, fromdocname, obj[0],
+                              name.replace('$', '_S_'), contnode, name))]
+
     def get_objects(self):
         for refname, (docname, type) in list(self.data['objects'].items()):
             yield refname, refname, type, docname, \
@@ -5,7 +5,7 @@

     The Python domain.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -84,13 +84,14 @@ def _pseudo_parse_arglist(signode, arglist):

 # This override allows our inline type specifiers to behave like :class: link
 # when it comes to handling "." and "~" prefixes.
-class PyTypedField(TypedField):
-    def make_xref(self, rolename, domain, target, innernode=nodes.emphasis):
-        result = super(PyTypedField, self).make_xref(rolename, domain, target,
-                                                     innernode)
+class PyXrefMixin(object):
+    def make_xref(self, rolename, domain, target, innernode=nodes.emphasis,
+                  contnode=None):
+        result = super(PyXrefMixin, self).make_xref(rolename, domain, target,
+                                                    innernode, contnode)
+        result['refspecific'] = True
         if target.startswith('.'):
             result['reftarget'] = target[1:]
-            result['refspecific'] = True
             result[0][0] = nodes.Text(target[1:])
         if target.startswith('~'):
             result['reftarget'] = target[1:]
@@ -99,6 +100,14 @@ class PyTypedField(TypedField):
         return result


+class PyField(PyXrefMixin, Field):
+    pass
+
+
+class PyTypedField(PyXrefMixin, TypedField):
+    pass
+
+
 class PyObject(ObjectDescription):
     """
     Description of a general Python object.
@@ -111,21 +120,21 @@ class PyObject(ObjectDescription):

     doc_field_types = [
         PyTypedField('parameter', label=l_('Parameters'),
                      names=('param', 'parameter', 'arg', 'argument',
                             'keyword', 'kwarg', 'kwparam'),
                      typerolename='obj', typenames=('paramtype', 'type'),
                      can_collapse=True),
-        TypedField('variable', label=l_('Variables'), rolename='obj',
+        PyTypedField('variable', label=l_('Variables'), rolename='obj',
                      names=('var', 'ivar', 'cvar'),
                      typerolename='obj', typenames=('vartype',),
                      can_collapse=True),
         GroupedField('exceptions', label=l_('Raises'), rolename='exc',
                      names=('raises', 'raise', 'exception', 'except'),
                      can_collapse=True),
         Field('returnvalue', label=l_('Returns'), has_arg=False,
               names=('returns', 'return')),
-        Field('returntype', label=l_('Return type'), has_arg=False,
-              names=('rtype',)),
+        PyField('returntype', label=l_('Return type'), has_arg=False,
+                names=('rtype',), bodyrolename='obj'),
     ]

     def get_signature_prefix(self, sig):
@@ -156,8 +165,8 @@ class PyObject(ObjectDescription):

         # determine module and class name (if applicable), as well as full name
         modname = self.options.get(
-            'module', self.env.temp_data.get('py:module'))
-        classname = self.env.temp_data.get('py:class')
+            'module', self.env.ref_context.get('py:module'))
+        classname = self.env.ref_context.get('py:class')
         if classname:
             add_module = False
             if name_prefix and name_prefix.startswith(classname):
@@ -194,7 +203,7 @@ class PyObject(ObjectDescription):
             # 'exceptions' module.
             elif add_module and self.env.config.add_module_names:
                 modname = self.options.get(
-                    'module', self.env.temp_data.get('py:module'))
+                    'module', self.env.ref_context.get('py:module'))
                 if modname and modname != 'exceptions':
                     nodetext = modname + '.'
                     signode += addnodes.desc_addname(nodetext, nodetext)
@@ -225,7 +234,7 @@ class PyObject(ObjectDescription):

     def add_target_and_index(self, name_cls, sig, signode):
         modname = self.options.get(
-            'module', self.env.temp_data.get('py:module'))
+            'module', self.env.ref_context.get('py:module'))
         fullname = (modname and modname + '.' or '') + name_cls[0]
         # note target
         if fullname not in self.state.document.ids:
@@ -254,7 +263,7 @@ class PyObject(ObjectDescription):

     def after_content(self):
         if self.clsname_set:
-            self.env.temp_data['py:class'] = None
+            self.env.ref_context.pop('py:class', None)


 class PyModulelevel(PyObject):
@@ -299,7 +308,7 @@ class PyClasslike(PyObject):
     def before_content(self):
         PyObject.before_content(self)
         if self.names:
-            self.env.temp_data['py:class'] = self.names[0][0]
+            self.env.ref_context['py:class'] = self.names[0][0]
             self.clsname_set = True


@@ -377,8 +386,8 @@ class PyClassmember(PyObject):
     def before_content(self):
         PyObject.before_content(self)
         lastname = self.names and self.names[-1][1]
-        if lastname and not self.env.temp_data.get('py:class'):
-            self.env.temp_data['py:class'] = lastname.strip('.')
+        if lastname and not self.env.ref_context.get('py:class'):
+            self.env.ref_context['py:class'] = lastname.strip('.')
             self.clsname_set = True


@@ -434,7 +443,7 @@ class PyModule(Directive):
         env = self.state.document.settings.env
         modname = self.arguments[0].strip()
         noindex = 'noindex' in self.options
-        env.temp_data['py:module'] = modname
+        env.ref_context['py:module'] = modname
         ret = []
         if not noindex:
             env.domaindata['py']['modules'][modname] = \
@@ -472,16 +481,16 @@ class PyCurrentModule(Directive):
         env = self.state.document.settings.env
         modname = self.arguments[0].strip()
         if modname == 'None':
-            env.temp_data['py:module'] = None
+            env.ref_context.pop('py:module', None)
         else:
-            env.temp_data['py:module'] = modname
+            env.ref_context['py:module'] = modname
         return []


 class PyXRefRole(XRefRole):
     def process_link(self, env, refnode, has_explicit_title, title, target):
-        refnode['py:module'] = env.temp_data.get('py:module')
-        refnode['py:class'] = env.temp_data.get('py:class')
+        refnode['py:module'] = env.ref_context.get('py:module')
+        refnode['py:class'] = env.ref_context.get('py:class')
         if not has_explicit_title:
             title = title.lstrip('.')   # only has a meaning for the target
             target = target.lstrip('~')  # only has a meaning for the title
@@ -627,6 +636,15 @@ class PythonDomain(Domain):
             if fn == docname:
                 del self.data['modules'][modname]

+    def merge_domaindata(self, docnames, otherdata):
+        # XXX check duplicates?
+        for fullname, (fn, objtype) in otherdata['objects'].items():
+            if fn in docnames:
+                self.data['objects'][fullname] = (fn, objtype)
+        for modname, data in otherdata['modules'].items():
+            if data[0] in docnames:
+                self.data['modules'][modname] = data
+
     def find_obj(self, env, modname, classname, name, type, searchmode=0):
         """Find a Python object for "name", perhaps using the given module
         and/or classname.  Returns a list of (name, object entry) tuples.
@@ -643,7 +661,10 @@ class PythonDomain(Domain):

         newname = None
         if searchmode == 1:
-            objtypes = self.objtypes_for_role(type)
+            if type is None:
+                objtypes = list(self.object_types)
+            else:
+                objtypes = self.objtypes_for_role(type)
             if objtypes is not None:
                 if modname and classname:
                     fullname = modname + '.' + classname + '.' + name
@@ -704,22 +725,44 @@ class PythonDomain(Domain):
             name, obj = matches[0]

         if obj[1] == 'module':
-            # get additional info for modules
-            docname, synopsis, platform, deprecated = self.data['modules'][name]
-            assert docname == obj[0]
-            title = name
-            if synopsis:
-                title += ': ' + synopsis
-            if deprecated:
-                title += _(' (deprecated)')
-            if platform:
-                title += ' (' + platform + ')'
-            return make_refnode(builder, fromdocname, docname,
-                                'module-' + name, contnode, title)
+            return self._make_module_refnode(builder, fromdocname, name,
+                                             contnode)
         else:
             return make_refnode(builder, fromdocname, obj[0], name,
                                 contnode, name)

+    def resolve_any_xref(self, env, fromdocname, builder, target,
+                         node, contnode):
+        modname = node.get('py:module')
+        clsname = node.get('py:class')
+        results = []
+
+        # always search in "refspecific" mode with the :any: role
+        matches = self.find_obj(env, modname, clsname, target, None, 1)
+        for name, obj in matches:
+            if obj[1] == 'module':
+                results.append(('py:mod',
+                                self._make_module_refnode(builder, fromdocname,
+                                                          name, contnode)))
+            else:
+                results.append(('py:' + self.role_for_objtype(obj[1]),
+                                make_refnode(builder, fromdocname, obj[0], name,
+                                             contnode, name)))
+        return results
+
+    def _make_module_refnode(self, builder, fromdocname, name, contnode):
+        # get additional info for modules
+        docname, synopsis, platform, deprecated = self.data['modules'][name]
+        title = name
+        if synopsis:
+            title += ': ' + synopsis
+        if deprecated:
+            title += _(' (deprecated)')
+        if platform:
+            title += ' (' + platform + ')'
+        return make_refnode(builder, fromdocname, docname,
+                            'module-' + name, contnode, title)
+
     def get_objects(self):
         for modname, info in iteritems(self.data['modules']):
             yield (modname, modname, 'module', info[0], 'module-' + modname, 0)
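
A side effect of moving from temp_data to ref_context is that the "current module/class" survives until reference resolution, and ``py:currentmodule:: None`` now pops the key instead of storing None. A small self-contained sketch of that behaviour (FakeEnv is an invented stand-in for the build environment):

    class FakeEnv(object):
        def __init__(self):
            self.ref_context = {}

    def set_current_module(env, modname):
        # mirrors the PyCurrentModule logic above
        if modname == 'None':
            env.ref_context.pop('py:module', None)
        else:
            env.ref_context['py:module'] = modname

    env = FakeEnv()
    set_current_module(env, 'sphinx.domains')
    assert env.ref_context.get('py:module') == 'sphinx.domains'
    set_current_module(env, 'None')
    assert 'py:module' not in env.ref_context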
@@ -5,7 +5,7 @@

     The reStructuredText domain.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -123,6 +123,12 @@ class ReSTDomain(Domain):
             if doc == docname:
                 del self.data['objects'][typ, name]

+    def merge_domaindata(self, docnames, otherdata):
+        # XXX check duplicates
+        for (typ, name), doc in otherdata['objects'].items():
+            if doc in docnames:
+                self.data['objects'][typ, name] = doc
+
     def resolve_xref(self, env, fromdocname, builder, typ, target, node,
                      contnode):
         objects = self.data['objects']
@@ -134,6 +140,19 @@ class ReSTDomain(Domain):
                             objtype + '-' + target,
                             contnode, target + ' ' + objtype)

+    def resolve_any_xref(self, env, fromdocname, builder, target,
+                         node, contnode):
+        objects = self.data['objects']
+        results = []
+        for objtype in self.object_types:
+            if (objtype, target) in self.data['objects']:
+                results.append(('rst:' + self.role_for_objtype(objtype),
+                                make_refnode(builder, fromdocname,
+                                             objects[objtype, target],
+                                             objtype + '-' + target,
+                                             contnode, target + ' ' + objtype)))
+        return results
+
     def get_objects(self):
         for (typ, name), docname in iteritems(self.data['objects']):
             yield name, name, typ, docname, typ + '-' + name, 1
@@ -5,7 +5,7 @@

     The standard domain.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

@@ -22,13 +22,15 @@ from sphinx.roles import XRefRole
 from sphinx.locale import l_, _
 from sphinx.domains import Domain, ObjType
 from sphinx.directives import ObjectDescription
-from sphinx.util import ws_re
+from sphinx.util import ws_re, get_figtype
 from sphinx.util.nodes import clean_astext, make_refnode
 from sphinx.util.compat import Directive


 # RE for option descriptions
-option_desc_re = re.compile(r'((?:/|-|--)?[-_a-zA-Z0-9]+)(\s*.*)')
+option_desc_re = re.compile(r'((?:/|--|-|\+)?[-?@#_a-zA-Z0-9]+)(=?\s*.*)')
+# RE for grammar tokens
+token_re = re.compile('`(\w+)`', re.U)


 class GenericObject(ObjectDescription):
@@ -144,8 +146,9 @@ class Cmdoption(ObjectDescription):
                 self.env.warn(
                     self.env.docname,
                     'Malformed option description %r, should '
-                    'look like "opt", "-opt args", "--opt args" or '
-                    '"/opt args"' % potential_option, self.lineno)
+                    'look like "opt", "-opt args", "--opt args", '
+                    '"/opt args" or "+opt args"' % potential_option,
+                    self.lineno)
                 continue
             optname, args = m.groups()
             if count:
@@ -163,7 +166,7 @@ class Cmdoption(ObjectDescription):
         return firstname

     def add_target_and_index(self, firstname, sig, signode):
-        currprogram = self.env.temp_data.get('std:program')
+        currprogram = self.env.ref_context.get('std:program')
         for optname in signode.get('allnames', []):
             targetname = optname.replace('/', '-')
             if not targetname.startswith('-'):
@@ -198,36 +201,19 @@ class Program(Directive):
         env = self.state.document.settings.env
         program = ws_re.sub('-', self.arguments[0].strip())
         if program == 'None':
-            env.temp_data['std:program'] = None
+            env.ref_context.pop('std:program', None)
         else:
-            env.temp_data['std:program'] = program
+            env.ref_context['std:program'] = program
         return []


 class OptionXRefRole(XRefRole):
-    innernodeclass = addnodes.literal_emphasis
-
-    def _split(self, text, refnode, env):
-        try:
-            program, target = re.split(' (?=-|--|/)', text, 1)
-        except ValueError:
-            env.warn_node('Malformed :option: %r, does not contain option '
-                          'marker - or -- or /' % text, refnode)
-            return None, text
-        else:
-            program = ws_re.sub('-', program)
-            return program, target
-
     def process_link(self, env, refnode, has_explicit_title, title, target):
-        program = env.temp_data.get('std:program')
-        if not has_explicit_title:
-            if ' ' in title and not (title.startswith('/') or
-                                     title.startswith('-')):
-                program, target = self._split(title, refnode, env)
-                target = target.strip()
-        elif ' ' in target:
-            program, target = self._split(target, refnode, env)
-        refnode['refprogram'] = program
+        # validate content
+        if not re.match('(.+ )?[-/+]', target):
+            env.warn_node('Malformed :option: %r, does not contain option '
+                          'marker - or -- or / or +' % target, refnode)
+        refnode['std:program'] = env.ref_context.get('std:program')
         return title, target


@@ -327,7 +313,7 @@ class Glossary(Directive):
                     else:
                         messages.append(self.state.reporter.system_message(
                             2, 'glossary seems to be misformatted, check '
                                'indentation', source=source, line=lineno))
                 else:
                     if not in_definition:
                         # first line of definition, determines indentation
@@ -338,7 +324,7 @@ class Glossary(Directive):
                     else:
                         messages.append(self.state.reporter.system_message(
                             2, 'glossary seems to be misformatted, check '
                                'indentation', source=source, line=lineno))
             was_empty = False

         # now, parse all the entries into a big definition list
@@ -359,7 +345,7 @@ class Glossary(Directive):
                 tmp.source = source
                 tmp.line = lineno
                 new_id, termtext, new_termnodes = \
                     make_termnodes_from_paragraph_node(env, tmp)
                 ids.append(new_id)
                 termtexts.append(termtext)
                 termnodes.extend(new_termnodes)
@@ -386,8 +372,6 @@ class Glossary(Directive):
     return messages + [node]


-token_re = re.compile('`(\w+)`', re.U)
-
 def token_xrefs(text):
     retnodes = []
     pos = 0
@@ -472,7 +456,7 @@ class StandardDomain(Domain):
         'productionlist': ProductionList,
     }
     roles = {
-        'option':  OptionXRefRole(innernodeclass=addnodes.literal_emphasis),
+        'option':  OptionXRefRole(),
         'envvar':  EnvVarXRefRole(),
         # links to tokens in grammar productions
         'token':   XRefRole(),
@@ -482,6 +466,9 @@ class StandardDomain(Domain):
         # links to headings or arbitrary labels
         'ref':     XRefRole(lowercase=True, innernodeclass=nodes.emphasis,
                             warn_dangling=True),
+        # links to labels of numbered figures, tables and code-blocks
+        'numref':  XRefRole(lowercase=True,
+                            warn_dangling=True),
         # links to labels, without a different title
         'keyword': XRefRole(warn_dangling=True),
     }
@@ -505,6 +492,7 @@ class StandardDomain(Domain):
         'term': 'term not in glossary: %(target)s',
         'ref':  'undefined label: %(target)s (if the link has no caption '
                 'the label must precede a section header)',
+        'numref':  'undefined label: %(target)s',
         'keyword': 'unknown keyword: %(target)s',
     }

@@ -522,6 +510,21 @@ class StandardDomain(Domain):
             if fn == docname:
                 del self.data['anonlabels'][key]

+    def merge_domaindata(self, docnames, otherdata):
+        # XXX duplicates?
+        for key, data in otherdata['progoptions'].items():
+            if data[0] in docnames:
+                self.data['progoptions'][key] = data
+        for key, data in otherdata['objects'].items():
+            if data[0] in docnames:
+                self.data['objects'][key] = data
+        for key, data in otherdata['labels'].items():
+            if data[0] in docnames:
+                self.data['labels'][key] = data
+        for key, data in otherdata['anonlabels'].items():
+            if data[0] in docnames:
+                self.data['anonlabels'][key] = data
+
     def process_doc(self, env, docname, document):
         labels, anonlabels = self.data['labels'], self.data['anonlabels']
         for name, explicit in iteritems(document.nametypes):
@@ -532,7 +535,7 @@ class StandardDomain(Domain):
                 continue
             node = document.ids[labelid]
             if name.isdigit() or 'refuri' in node or \
                    node.tagname.startswith('desc_'):
                 # ignore footnote labels, labels automatically generated from a
                 # link and object descriptions
                 continue
@@ -541,7 +544,7 @@ class StandardDomain(Domain):
                          'in ' + env.doc2path(labels[name][0]), node)
             anonlabels[name] = docname, labelid
             if node.tagname == 'section':
                 sectname = clean_astext(node[0])  # node[0] == title node
             elif node.tagname == 'figure':
                 for n in node:
                     if n.tagname == 'caption':
@@ -563,52 +566,105 @@ class StandardDomain(Domain):
                         break
                 else:
                     continue
+            elif node.tagname == 'container' and node.get('literal_block'):
+                for n in node:
+                    if n.tagname == 'caption':
+                        sectname = clean_astext(n)
+                        break
+                else:
+                    continue
             else:
                 # anonymous-only labels
                 continue
             labels[name] = docname, labelid, sectname

+    def build_reference_node(self, fromdocname, builder,
+                             docname, labelid, sectname,
+                             **options):
+        nodeclass = options.pop('nodeclass', nodes.reference)
+        newnode = nodeclass('', '', internal=True, **options)
+        innernode = nodes.emphasis(sectname, sectname)
+        if docname == fromdocname:
+            newnode['refid'] = labelid
+        else:
+            # set more info in contnode; in case the
+            # get_relative_uri call raises NoUri,
+            # the builder will then have to resolve these
+            contnode = addnodes.pending_xref('')
+            contnode['refdocname'] = docname
+            contnode['refsectname'] = sectname
+            newnode['refuri'] = builder.get_relative_uri(
+                fromdocname, docname)
+            if labelid:
+                newnode['refuri'] += '#' + labelid
+        newnode.append(innernode)
+        return newnode
+
     def resolve_xref(self, env, fromdocname, builder,
                      typ, target, node, contnode):
         if typ == 'ref':
             if node['refexplicit']:
                 # reference to anonymous label; the reference uses
                 # the supplied link caption
-                docname, labelid = self.data['anonlabels'].get(target, ('',''))
+                docname, labelid = self.data['anonlabels'].get(target, ('', ''))
                 sectname = node.astext()
             else:
                 # reference to named label; the final node will
                 # contain the section name after the label
                 docname, labelid, sectname = self.data['labels'].get(target,
-                                                                     ('','',''))
+                                                                     ('', '', ''))
             if not docname:
                 return None
-            newnode = nodes.reference('', '', internal=True)
-            innernode = nodes.emphasis(sectname, sectname)
-            if docname == fromdocname:
-                newnode['refid'] = labelid
-            else:
-                # set more info in contnode; in case the
-                # get_relative_uri call raises NoUri,
-                # the builder will then have to resolve these
-                contnode = addnodes.pending_xref('')
-                contnode['refdocname'] = docname
-                contnode['refsectname'] = sectname
-                newnode['refuri'] = builder.get_relative_uri(
-                    fromdocname, docname)
-                if labelid:
-                    newnode['refuri'] += '#' + labelid
-            newnode.append(innernode)
-            return newnode
+
+            return self.build_reference_node(fromdocname, builder,
+                                             docname, labelid, sectname)
+        elif typ == 'numref':
+            docname, labelid = self.data['anonlabels'].get(target, ('', ''))
+            if not docname:
+                return None
+
+            if env.config.numfig is False:
+                env.warn(fromdocname, 'numfig is disabled. :numref: is ignored.')
+                return contnode
+
+            try:
+                target_node = env.get_doctree(docname).ids[labelid]
+                figtype = get_figtype(target_node)
+                figure_id = target_node['ids'][0]
+                fignumber = env.toc_fignumbers[docname][figtype][figure_id]
+            except (KeyError, IndexError):
+                return None
+
+            title = contnode.astext()
+            if target == title:
+                prefix = env.config.numfig_format.get(figtype, '')
+                title = prefix.replace('%s', '#')
+                newtitle = prefix % '.'.join(map(str, fignumber))
+            else:
+                newtitle = title.replace('#', '.'.join(map(str, fignumber)))
+
+            return self.build_reference_node(fromdocname, builder,
+                                             docname, labelid, newtitle,
+                                             nodeclass=addnodes.number_reference,
+                                             title=title)
         elif typ == 'keyword':
             # keywords are oddballs: they are referenced by named labels
-            docname, labelid, _ = self.data['labels'].get(target, ('','',''))
+            docname, labelid, _ = self.data['labels'].get(target, ('', '', ''))
             if not docname:
                 return None
             return make_refnode(builder, fromdocname, docname,
                                 labelid, contnode)
         elif typ == 'option':
-            progname = node['refprogram']
+            target = target.strip()
+            # most obvious thing: we are a flag option without program
+            if target.startswith(('-', '/', '+')):
+                progname = node.get('std:program')
+            else:
+                try:
+                    progname, target = re.split(r' (?=-|--|/|\+)', target, 1)
+                except ValueError:
+                    return None
+                progname = ws_re.sub('-', progname.strip())
             docname, labelid = self.data['progoptions'].get((progname, target),
                                                             ('', ''))
             if not docname:
@@ -628,6 +684,28 @@ class StandardDomain(Domain):
             return make_refnode(builder, fromdocname, docname,
                                 labelid, contnode)

+    def resolve_any_xref(self, env, fromdocname, builder, target,
+                         node, contnode):
+        results = []
+        ltarget = target.lower()  # :ref: lowercases its target automatically
+        for role in ('ref', 'option'):  # do not try "keyword"
+            res = self.resolve_xref(env, fromdocname, builder, role,
+                                    ltarget if role == 'ref' else target,
+                                    node, contnode)
+            if res:
+                results.append(('std:' + role, res))
+        # all others
+        for objtype in self.object_types:
+            key = (objtype, target)
+            if objtype == 'term':
+                key = (objtype, ltarget)
+            if key in self.data['objects']:
+                docname, labelid = self.data['objects'][key]
+                results.append(('std:' + self.role_for_objtype(objtype),
+                                make_refnode(builder, fromdocname, docname,
+                                             labelid, contnode)))
+        return results
+
     def get_objects(self):
         for (prog, option), info in iteritems(self.data['progoptions']):
             yield (option, option, 'option', info[0], info[1], 1)

[File diff suppressed because it is too large]
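
On the project side, the new :numref: role only does something useful when numbered figures are switched on. A hedged conf.py sketch follows; the option names numfig and numfig_format come from the diff above, while the values shown are examples rather than defaults:

    # conf.py
    numfig = True                      # enable figure/table/code-block numbering
    numfig_format = {
        'figure': 'Fig. %s',           # used when the :numref: has no explicit title
        'table': 'Table %s',
        'code-block': 'Listing %s',
    }

With this in place, a reference whose text equals its target gets the prefix applied (e.g. "Fig. 2.1"), while an explicit title may use '#' as a placeholder for the number, matching the replace('#', ...) logic in resolve_xref.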
@@ -6,10 +6,13 @@
     Contains SphinxError and a few subclasses (in an extra module to avoid
     circular import problems).

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

+import traceback
+
+
 class SphinxError(Exception):
     """
     Base class for Sphinx errors that are shown to the user in a nicer
@@ -62,3 +65,13 @@ class PycodeError(Exception):
         if len(self.args) > 1:
             res += ' (exception was: %r)' % self.args[1]
         return res
+
+
+class SphinxParallelError(Exception):
+    def __init__(self, orig_exc, traceback):
+        self.orig_exc = orig_exc
+        self.traceback = traceback
+
+    def __str__(self):
+        return traceback.format_exception_only(
+            self.orig_exc.__class__, self.orig_exc)[0].strip()
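
SphinxParallelError exists so a worker process can ship a failure back to the parent together with an already-formatted traceback. A hedged sketch of how a forked worker might wrap an exception (run_worker is an invented helper for illustration, not the actual call site in the builder):

    import traceback

    from sphinx.errors import SphinxParallelError

    def run_worker(func, *args):
        try:
            return func(*args)
        except Exception as exc:
            # keep the formatted traceback as a string; the live traceback
            # object cannot be pickled across process boundaries
            raise SphinxParallelError(exc, traceback.format_exc())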
@@ -5,6 +5,6 @@

     Contains Sphinx features not activated by default.

-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
|
@ -7,7 +7,7 @@
|
|||||||
the doctree, thus avoiding duplication between docstrings and documentation
|
the doctree, thus avoiding duplication between docstrings and documentation
|
||||||
for those who like elaborate docstrings.
|
for those who like elaborate docstrings.
|
||||||
|
|
||||||
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
|
:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
|
||||||
:license: BSD, see LICENSE for details.
|
:license: BSD, see LICENSE for details.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -30,7 +30,7 @@ from sphinx.application import ExtensionError
|
|||||||
from sphinx.util.nodes import nested_parse_with_titles
|
from sphinx.util.nodes import nested_parse_with_titles
|
||||||
from sphinx.util.compat import Directive
|
from sphinx.util.compat import Directive
|
||||||
from sphinx.util.inspect import getargspec, isdescriptor, safe_getmembers, \
|
from sphinx.util.inspect import getargspec, isdescriptor, safe_getmembers, \
|
||||||
safe_getattr, safe_repr, is_builtin_class_method
|
safe_getattr, object_description, is_builtin_class_method
|
||||||
from sphinx.util.docstrings import prepare_docstring
|
from sphinx.util.docstrings import prepare_docstring
|
||||||
|
|
||||||
|
|
||||||
@ -50,11 +50,13 @@ class DefDict(dict):
|
|||||||
def __init__(self, default):
|
def __init__(self, default):
|
||||||
dict.__init__(self)
|
dict.__init__(self)
|
||||||
self.default = default
|
self.default = default
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
try:
|
try:
|
||||||
return dict.__getitem__(self, key)
|
return dict.__getitem__(self, key)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return self.default
|
return self.default
|
||||||
|
|
||||||
def __bool__(self):
|
def __bool__(self):
|
||||||
# docutils check "if option_spec"
|
# docutils check "if option_spec"
|
||||||
return True
|
return True
|
||||||
@ -92,6 +94,7 @@ class _MockModule(object):
|
|||||||
else:
|
else:
|
||||||
return _MockModule()
|
return _MockModule()
|
||||||
|
|
||||||
|
|
||||||
def mock_import(modname):
|
def mock_import(modname):
|
||||||
if '.' in modname:
|
if '.' in modname:
|
||||||
pkg, _n, mods = modname.rpartition('.')
|
pkg, _n, mods = modname.rpartition('.')
|
||||||
@ -104,12 +107,14 @@ def mock_import(modname):
|
|||||||
ALL = object()
|
ALL = object()
|
||||||
INSTANCEATTR = object()
|
INSTANCEATTR = object()
|
||||||
|
|
||||||
|
|
||||||
def members_option(arg):
|
def members_option(arg):
|
||||||
"""Used to convert the :members: option to auto directives."""
|
"""Used to convert the :members: option to auto directives."""
|
||||||
if arg is None:
|
if arg is None:
|
||||||
return ALL
|
return ALL
|
||||||
return [x.strip() for x in arg.split(',')]
|
return [x.strip() for x in arg.split(',')]
|
||||||
|
|
||||||
|
|
||||||
def members_set_option(arg):
|
def members_set_option(arg):
|
||||||
"""Used to convert the :members: option to auto directives."""
|
"""Used to convert the :members: option to auto directives."""
|
||||||
if arg is None:
|
if arg is None:
|
||||||
@ -118,6 +123,7 @@ def members_set_option(arg):
|
|||||||
|
|
||||||
SUPPRESS = object()
|
SUPPRESS = object()
|
||||||
|
|
||||||
|
|
||||||
def annotation_option(arg):
|
def annotation_option(arg):
|
||||||
if arg is None:
|
if arg is None:
|
||||||
# suppress showing the representation of the object
|
# suppress showing the representation of the object
|
||||||
@ -125,6 +131,7 @@ def annotation_option(arg):
|
|||||||
else:
|
else:
|
||||||
return arg
|
return arg
|
||||||
|
|
||||||
|
|
||||||
def bool_option(arg):
|
def bool_option(arg):
|
||||||
"""Used to convert flag options to auto directives. (Instead of
|
"""Used to convert flag options to auto directives. (Instead of
|
||||||
directives.flag(), which returns None).
|
directives.flag(), which returns None).
|
||||||
@ -201,6 +208,7 @@ def cut_lines(pre, post=0, what=None):
|
|||||||
lines.append('')
|
lines.append('')
|
||||||
return process
|
return process
|
||||||
|
|
||||||
|
|
||||||
def between(marker, what=None, keepempty=False, exclude=False):
|
def between(marker, what=None, keepempty=False, exclude=False):
|
||||||
"""Return a listener that either keeps, or if *exclude* is True excludes,
|
"""Return a listener that either keeps, or if *exclude* is True excludes,
|
||||||
lines between lines that match the *marker* regular expression. If no line
|
lines between lines that match the *marker* regular expression. If no line
|
||||||
@ -211,6 +219,7 @@ def between(marker, what=None, keepempty=False, exclude=False):
|
|||||||
be processed.
|
be processed.
|
||||||
"""
|
"""
|
||||||
marker_re = re.compile(marker)
|
marker_re = re.compile(marker)
|
||||||
|
|
||||||
def process(app, what_, name, obj, options, lines):
|
def process(app, what_, name, obj, options, lines):
|
||||||
if what and what_ not in what:
|
if what and what_ not in what:
|
||||||
return
|
return
|
||||||
@ -234,6 +243,11 @@ def between(marker, what=None, keepempty=False, exclude=False):
|
|||||||
return process
|
return process
|
||||||
|
|
||||||
|
|
||||||
|
def formatargspec(*argspec):
|
||||||
|
return inspect.formatargspec(*argspec,
|
||||||
|
formatvalue=lambda x: '=' + object_description(x))
|
||||||
|
|
||||||
|
|
||||||
class Documenter(object):
|
class Documenter(object):
|
||||||
"""
|
"""
|
||||||
A Documenter knows how to autodocument a single object type. When
|
A Documenter knows how to autodocument a single object type. When
|
||||||
@ -325,7 +339,7 @@ class Documenter(object):
|
|||||||
# an autogenerated one
|
# an autogenerated one
|
||||||
try:
|
try:
|
||||||
explicit_modname, path, base, args, retann = \
|
 explicit_modname, path, base, args, retann = \
 py_ext_sig_re.match(self.name).groups()
 except AttributeError:
 self.directive.warn('invalid signature for auto%s (%r)' %
 (self.objtype, self.name))
@@ -340,7 +354,7 @@ class Documenter(object):
 parents = []

 self.modname, self.objpath = \
 self.resolve_name(modname, parents, path, base)

 if not self.modname:
 return False
@@ -472,14 +486,15 @@ class Documenter(object):
 domain = getattr(self, 'domain', 'py')
 directive = getattr(self, 'directivetype', self.objtype)
 name = self.format_name()
+sourcename = self.get_sourcename()
 self.add_line(u'.. %s:%s:: %s%s' % (domain, directive, name, sig),
-'<autodoc>')
+sourcename)
 if self.options.noindex:
-self.add_line(u' :noindex:', '<autodoc>')
+self.add_line(u' :noindex:', sourcename)
 if self.objpath:
 # Be explicit about the module, this is necessary since .. class::
 # etc. don't support a prepended module name
-self.add_line(u' :module: %s' % self.modname, '<autodoc>')
+self.add_line(u' :module: %s' % self.modname, sourcename)

 def get_doc(self, encoding=None, ignore=1):
 """Decode and return lines of the docstring(s) for the object."""
@@ -505,9 +520,7 @@ class Documenter(object):
 for line in docstringlines:
 yield line

-def add_content(self, more_content, no_docstring=False):
-"""Add content from docstrings, attribute documentation and user."""
-# set sourcename and add content from attribute documentation
+def get_sourcename(self):
 if self.analyzer:
 # prevent encoding errors when the file name is non-ASCII
 if not isinstance(self.analyzer.srcname, text_type):
@@ -515,8 +528,14 @@ class Documenter(object):
 sys.getfilesystemencoding(), 'replace')
 else:
 filename = self.analyzer.srcname
-sourcename = u'%s:docstring of %s' % (filename, self.fullname)
+return u'%s:docstring of %s' % (filename, self.fullname)
+return u'docstring of %s' % self.fullname

+def add_content(self, more_content, no_docstring=False):
+"""Add content from docstrings, attribute documentation and user."""
+# set sourcename and add content from attribute documentation
+sourcename = self.get_sourcename()
 if self.analyzer:
 attr_docs = self.analyzer.find_attr_docs()
 if self.objpath:
 key = ('.'.join(self.objpath[:-1]), self.objpath[-1])
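The new get_sourcename() centralises the source-name string that every add_line() call above now receives instead of the old '<autodoc>' placeholder. A minimal standalone sketch of that fallback logic (file and object names here are hypothetical, not taken from the diff)::

    def get_sourcename(filename, fullname):
        # mirrors Documenter.get_sourcename(): attribute generated reST lines to
        # the analyzed source file when one is known, else to a generic label
        if filename:
            return u'%s:docstring of %s' % (filename, fullname)
        return u'docstring of %s' % fullname

    print(get_sourcename('/path/to/mymodule.py', 'mymodule.MyClass'))
    print(get_sourcename(None, 'mymodule.MyClass'))

With a real file name available, warnings about malformed generated reST can point at the docstring's origin rather than at an anonymous '<autodoc>' source.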
@@ -525,8 +544,6 @@ class Documenter(object):
 docstrings = [attr_docs[key]]
 for i, line in enumerate(self.process_doc(docstrings)):
 self.add_line(line, sourcename, i)
-else:
-sourcename = u'docstring of %s' % self.fullname

 # add content from docstrings
 if not no_docstring:
@@ -637,19 +654,19 @@ class Documenter(object):

 keep = False
 if want_all and membername.startswith('__') and \
 membername.endswith('__') and len(membername) > 4:
 # special __methods__
 if self.options.special_members is ALL and \
 membername != '__doc__':
 keep = has_doc or self.options.undoc_members
 elif self.options.special_members and \
 self.options.special_members is not ALL and \
 membername in self.options.special_members:
 keep = has_doc or self.options.undoc_members
 elif want_all and membername.startswith('_'):
 # ignore members whose name starts with _ by default
 keep = self.options.private_members and \
 (has_doc or self.options.undoc_members)
 elif (namespace, membername) in attr_docs:
 # keep documented attributes
 keep = True
@@ -685,7 +702,7 @@ class Documenter(object):
 self.env.temp_data['autodoc:class'] = self.objpath[0]

 want_all = all_members or self.options.inherited_members or \
 self.options.members is ALL
 # find out which members are documentable
 members_check_module, members = self.get_object_members(want_all)

@@ -707,11 +724,11 @@ class Documenter(object):
 # give explicitly separated module name, so that members
 # of inner classes can be documented
 full_mname = self.modname + '::' + \
 '.'.join(self.objpath + [mname])
 documenter = classes[-1](self.directive, full_mname, self.indent)
 memberdocumenters.append((documenter, isattr))
 member_order = self.options.member_order or \
 self.env.config.autodoc_member_order
 if member_order == 'groupwise':
 # sort by group; relies on stable sort to keep items in the
 # same group sorted alphabetically
@@ -719,6 +736,7 @@ class Documenter(object):
 elif member_order == 'bysource' and self.analyzer:
 # sort by source order, by virtue of the module analyzer
 tagorder = self.analyzer.tagorder
+
 def keyfunc(entry):
 fullname = entry[0].name.split('::')[1]
 return tagorder.get(fullname, len(tagorder))
@@ -784,17 +802,19 @@ class Documenter(object):
 if not self.check_module():
 return

+sourcename = self.get_sourcename()
+
 # make sure that the result starts with an empty line. This is
 # necessary for some situations where another directive preprocesses
 # reST and no starting newline is present
-self.add_line(u'', '<autodoc>')
+self.add_line(u'', sourcename)

 # format the object's signature, if any
 sig = self.format_signature()

 # generate the directive header and options, if applicable
 self.add_directive_header(sig)
-self.add_line(u'', '<autodoc>')
+self.add_line(u'', sourcename)

 # e.g. the module directive doesn't have content
 self.indent += self.content_indent
@@ -844,15 +864,17 @@ class ModuleDocumenter(Documenter):
 def add_directive_header(self, sig):
 Documenter.add_directive_header(self, sig)

+sourcename = self.get_sourcename()
+
 # add some module-specific options
 if self.options.synopsis:
 self.add_line(
-u' :synopsis: ' + self.options.synopsis, '<autodoc>')
+u' :synopsis: ' + self.options.synopsis, sourcename)
 if self.options.platform:
 self.add_line(
-u' :platform: ' + self.options.platform, '<autodoc>')
+u' :platform: ' + self.options.platform, sourcename)
 if self.options.deprecated:
-self.add_line(u' :deprecated:', '<autodoc>')
+self.add_line(u' :deprecated:', sourcename)

 def get_object_members(self, want_all):
 if want_all:
@@ -862,6 +884,15 @@ class ModuleDocumenter(Documenter):
 return True, safe_getmembers(self.object)
 else:
 memberlist = self.object.__all__
+# Sometimes __all__ is broken...
+if not isinstance(memberlist, (list, tuple)) or not \
+all(isinstance(entry, str) for entry in memberlist):
+self.directive.warn(
+'__all__ should be a list of strings, not %r '
+'(in module %s) -- ignoring __all__' %
+(memberlist, self.fullname))
+# fall back to all members
+return True, safe_getmembers(self.object)
 else:
 memberlist = self.options.members or []
 ret = []
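The guard added above only trusts __all__ when it is a list or tuple whose entries are all strings; anything else is reported and ignored, and autodoc falls back to safe_getmembers(). A hypothetical module (not from the diff) that would trigger the new warning::

    # mymodule.py -- hypothetical example
    __all__ = 'spam, eggs'   # a plain string, not a list/tuple of strings

    def spam():
        """Documented function."""

    def eggs():
        """Also documented; still found through the safe_getmembers() fallback."""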
@@ -872,7 +903,7 @@ class ModuleDocumenter(Documenter):
 self.directive.warn(
 'missing attribute mentioned in :members: or __all__: '
 'module %s, attribute %s' % (
 safe_getattr(self.object, '__name__', '???'), mname))
 return False, ret


@@ -891,7 +922,7 @@ class ModuleLevelDocumenter(Documenter):
 modname = self.env.temp_data.get('autodoc:module')
 # ... or in the scope of a module directive
 if not modname:
-modname = self.env.temp_data.get('py:module')
+modname = self.env.ref_context.get('py:module')
 # ... else, it stays None, which means invalid
 return modname, parents + [base]

@@ -913,7 +944,7 @@ class ClassLevelDocumenter(Documenter):
 mod_cls = self.env.temp_data.get('autodoc:class')
 # ... or from a class directive
 if mod_cls is None:
-mod_cls = self.env.temp_data.get('py:class')
+mod_cls = self.env.ref_context.get('py:class')
 # ... if still None, there's no way to know
 if mod_cls is None:
 return None, []
@@ -923,7 +954,7 @@ class ClassLevelDocumenter(Documenter):
 if not modname:
 modname = self.env.temp_data.get('autodoc:module')
 if not modname:
-modname = self.env.temp_data.get('py:module')
+modname = self.env.ref_context.get('py:module')
 # ... else, it stays None, which means invalid
 return modname, parents + [base]

@@ -935,36 +966,37 @@ class DocstringSignatureMixin(object):
 """

 def _find_signature(self, encoding=None):
-docstrings = Documenter.get_doc(self, encoding)
-if len(docstrings) != 1:
-return
-doclines = docstrings[0]
-setattr(self, '__new_doclines', doclines)
+docstrings = self.get_doc(encoding)
+self._new_docstrings = docstrings[:]
+result = None
+for i, doclines in enumerate(docstrings):
+# no lines in docstring, no match
 if not doclines:
-return
+continue
 # match first line of docstring against signature RE
 match = py_ext_sig_re.match(doclines[0])
 if not match:
-return
+continue
 exmod, path, base, args, retann = match.groups()
 # the base name must match ours
-if not self.objpath or base != self.objpath[-1]:
-return
-# re-prepare docstring to ignore indentation after signature
-docstrings = Documenter.get_doc(self, encoding, 2)
-doclines = docstrings[0]
-# ok, now jump over remaining empty lines and set the remaining
-# lines as the new doclines
-i = 1
-while i < len(doclines) and not doclines[i].strip():
-i += 1
-setattr(self, '__new_doclines', doclines[i:])
-return args, retann
+valid_names = [self.objpath[-1]]
+if isinstance(self, ClassDocumenter):
+valid_names.append('__init__')
+if hasattr(self.object, '__mro__'):
+valid_names.extend(cls.__name__ for cls in self.object.__mro__)
+if base not in valid_names:
+continue
+# re-prepare docstring to ignore more leading indentation
+self._new_docstrings[i] = prepare_docstring('\n'.join(doclines[1:]))
+result = args, retann
+# don't look any further
+break
+return result

 def get_doc(self, encoding=None, ignore=1):
-lines = getattr(self, '__new_doclines', None)
+lines = getattr(self, '_new_docstrings', None)
 if lines is not None:
-return [lines]
+return lines
 return Documenter.get_doc(self, encoding, ignore)

 def format_signature(self):
@@ -976,6 +1008,7 @@ class DocstringSignatureMixin(object):
 self.args, self.retann = result
 return Documenter.format_signature(self)

+
 class DocstringStripSignatureMixin(DocstringSignatureMixin):
 """
 Mixin for AttributeDocumenter to provide the
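The reworked _find_signature() now inspects every docstring it is given and, for classes, accepts the class name, '__init__', or any base-class name from the MRO as the signature's base. A hedged illustration of the kind of docstring this targets (the class is hypothetical)::

    class Point(object):
        """Point(x, y) -> a new point at the given coordinates.

        With autodoc_docstring_signature enabled, the first line above is
        consumed as the signature and stripped from the rendered docstring.
        """
        def __init__(self, *args, **kwargs):
            self.x, self.y = args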
@@ -1007,7 +1040,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):

 def format_args(self):
 if inspect.isbuiltin(self.object) or \
 inspect.ismethoddescriptor(self.object):
 # cannot introspect arguments of a C function or method
 return None
 try:
@@ -1026,7 +1059,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):
 argspec = getargspec(self.object.__init__)
 if argspec[0]:
 del argspec[0][0]
-args = inspect.formatargspec(*argspec)
+args = formatargspec(*argspec)
 # escape backslashes for reST
 args = args.replace('\\', '\\\\')
 return args
@@ -1035,7 +1068,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):
 pass


-class ClassDocumenter(ModuleLevelDocumenter):
+class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):
 """
 Specialized Documenter subclass for classes.
 """
@@ -1070,8 +1103,8 @@ class ClassDocumenter(ModuleLevelDocumenter):
 # classes without __init__ method, default __init__ or
 # __init__ written in C?
 if initmeth is None or \
 is_builtin_class_method(self.object, '__init__') or \
 not(inspect.ismethod(initmeth) or inspect.isfunction(initmeth)):
 return None
 try:
 argspec = getargspec(initmeth)
@@ -1081,24 +1114,13 @@ class ClassDocumenter(ModuleLevelDocumenter):
 return None
 if argspec[0] and argspec[0][0] in ('cls', 'self'):
 del argspec[0][0]
-return inspect.formatargspec(*argspec)
+return formatargspec(*argspec)

 def format_signature(self):
 if self.doc_as_attr:
 return ''

-# get __init__ method signature from __init__.__doc__
-if self.env.config.autodoc_docstring_signature:
-# only act if the feature is enabled
-init_doc = MethodDocumenter(self.directive, '__init__')
-init_doc.object = self.get_attr(self.object, '__init__', None)
-init_doc.objpath = ['__init__']
-result = init_doc._find_signature()
-if result is not None:
-# use args only for Class signature
-return '(%s)' % result[0]
-
-return ModuleLevelDocumenter.format_signature(self)
+return DocstringSignatureMixin.format_signature(self)

 def add_directive_header(self, sig):
 if self.doc_as_attr:
@@ -1107,16 +1129,21 @@ class ClassDocumenter(ModuleLevelDocumenter):

 # add inheritance info, if wanted
 if not self.doc_as_attr and self.options.show_inheritance:
-self.add_line(u'', '<autodoc>')
+sourcename = self.get_sourcename()
+self.add_line(u'', sourcename)
 if hasattr(self.object, '__bases__') and len(self.object.__bases__):
-bases = [b.__module__ == '__builtin__' and
+bases = [b.__module__ in ('__builtin__', 'builtins') and
 u':class:`%s`' % b.__name__ or
 u':class:`%s.%s`' % (b.__module__, b.__name__)
 for b in self.object.__bases__]
 self.add_line(_(u' Bases: %s') % ', '.join(bases),
-'<autodoc>')
+sourcename)

 def get_doc(self, encoding=None, ignore=1):
+lines = getattr(self, '_new_docstrings', None)
+if lines is not None:
+return lines
+
 content = self.env.config.autoclass_content

 docstrings = []
@@ -1127,22 +1154,12 @@ class ClassDocumenter(ModuleLevelDocumenter):
 # for classes, what the "docstring" is can be controlled via a
 # config value; the default is only the class docstring
 if content in ('both', 'init'):
-# get __init__ method document from __init__.__doc__
-if self.env.config.autodoc_docstring_signature:
-# only act if the feature is enabled
-init_doc = MethodDocumenter(self.directive, '__init__')
-init_doc.object = self.get_attr(self.object, '__init__', None)
-init_doc.objpath = ['__init__']
-init_doc._find_signature() # this effects to get_doc() result
-initdocstring = '\n'.join(
-['\n'.join(l) for l in init_doc.get_doc(encoding)])
-else:
-initdocstring = self.get_attr(
-self.get_attr(self.object, '__init__', None), '__doc__')
+initdocstring = self.get_attr(
+self.get_attr(self.object, '__init__', None), '__doc__')
 # for new-style classes, no __init__ means default __init__
 if (initdocstring is not None and
 (initdocstring == object.__init__.__doc__ or  # for pypy
-initdocstring.strip() == object.__init__.__doc__)): #for !pypy
+initdocstring.strip() == object.__init__.__doc__)):  # for !pypy
 initdocstring = None
 if initdocstring:
 if content == 'init':
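get_doc() above assembles the class "docstring" according to the autoclass_content setting. A minimal conf.py sketch of the documented choices (the comments are a reading of the hunk above, not new behaviour)::

    # conf.py
    extensions = ['sphinx.ext.autodoc']

    # 'class' -> class docstring only (the default)
    # 'init'  -> __init__ docstring only
    # 'both'  -> class docstring followed by the __init__ docstring,
    #            unless __init__ only carries the default object.__init__ text
    autoclass_content = 'both'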
@@ -1186,7 +1203,7 @@ class ExceptionDocumenter(ClassDocumenter):
 @classmethod
 def can_document_member(cls, member, membername, isattr, parent):
 return isinstance(member, class_types) and \
 issubclass(member, BaseException)


 class DataDocumenter(ModuleLevelDocumenter):
@@ -1205,18 +1222,19 @@ class DataDocumenter(ModuleLevelDocumenter):

 def add_directive_header(self, sig):
 ModuleLevelDocumenter.add_directive_header(self, sig)
+sourcename = self.get_sourcename()
 if not self.options.annotation:
 try:
-objrepr = safe_repr(self.object)
+objrepr = object_description(self.object)
 except ValueError:
 pass
 else:
-self.add_line(u' :annotation: = ' + objrepr, '<autodoc>')
+self.add_line(u' :annotation: = ' + objrepr, sourcename)
 elif self.options.annotation is SUPPRESS:
 pass
 else:
 self.add_line(u' :annotation: %s' % self.options.annotation,
-'<autodoc>')
+sourcename)

 def document_members(self, all_members=False):
 pass
@@ -1233,7 +1251,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):
 @classmethod
 def can_document_member(cls, member, membername, isattr, parent):
 return inspect.isroutine(member) and \
 not isinstance(parent, ModuleDocumenter)

 def import_object(self):
 ret = ClassLevelDocumenter.import_object(self)
@@ -1257,13 +1275,13 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):

 def format_args(self):
 if inspect.isbuiltin(self.object) or \
 inspect.ismethoddescriptor(self.object):
 # can never get arguments of a C function or method
 return None
 argspec = getargspec(self.object)
 if argspec[0] and argspec[0][0] in ('cls', 'self'):
 del argspec[0][0]
-args = inspect.formatargspec(*argspec)
+args = formatargspec(*argspec)
 # escape backslashes for reST
 args = args.replace('\\', '\\\\')
 return args
@@ -1272,7 +1290,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):
 pass


-class AttributeDocumenter(DocstringStripSignatureMixin,ClassLevelDocumenter):
+class AttributeDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter):
 """
 Specialized Documenter subclass for attributes.
 """
@@ -1290,9 +1308,9 @@ class AttributeDocumenter(DocstringStripSignatureMixin,ClassLevelDocumenter):
 @classmethod
 def can_document_member(cls, member, membername, isattr, parent):
 isdatadesc = isdescriptor(member) and not \
 isinstance(member, cls.method_types) and not \
 type(member).__name__ in ("type", "method_descriptor",
 "instancemethod")
 return isdatadesc or (not isinstance(parent, ModuleDocumenter)
 and not inspect.isroutine(member)
 and not isinstance(member, class_types))
@@ -1303,7 +1321,7 @@ class AttributeDocumenter(DocstringStripSignatureMixin,ClassLevelDocumenter):
 def import_object(self):
 ret = ClassLevelDocumenter.import_object(self)
 if isdescriptor(self.object) and \
 not isinstance(self.object, self.method_types):
 self._datadescriptor = True
 else:
 # if it's not a data descriptor
@@ -1312,23 +1330,24 @@ class AttributeDocumenter(DocstringStripSignatureMixin,ClassLevelDocumenter):

 def get_real_modname(self):
 return self.get_attr(self.parent or self.object, '__module__', None) \
 or self.modname

 def add_directive_header(self, sig):
 ClassLevelDocumenter.add_directive_header(self, sig)
+sourcename = self.get_sourcename()
 if not self.options.annotation:
 if not self._datadescriptor:
 try:
-objrepr = safe_repr(self.object)
+objrepr = object_description(self.object)
 except ValueError:
 pass
 else:
-self.add_line(u' :annotation: = ' + objrepr, '<autodoc>')
+self.add_line(u' :annotation: = ' + objrepr, sourcename)
 elif self.options.annotation is SUPPRESS:
 pass
 else:
 self.add_line(u' :annotation: %s' % self.options.annotation,
-'<autodoc>')
+sourcename)

 def add_content(self, more_content, no_docstring=False):
 if not self._datadescriptor:
@@ -1479,7 +1498,7 @@ def add_documenter(cls):
 raise ExtensionError('autodoc documenter %r must be a subclass '
 'of Documenter' % cls)
 # actually, it should be possible to override Documenters
-#if cls.objtype in AutoDirective._registry:
+# if cls.objtype in AutoDirective._registry:
 # raise ExtensionError('autodoc documenter for %r is already '
 # 'registered' % cls.objtype)
 AutoDirective._registry[cls.objtype] = cls
@@ -1504,7 +1523,7 @@ def setup(app):
 app.add_event('autodoc-process-signature')
 app.add_event('autodoc-skip-member')

-return sphinx.__version__
+return {'version': sphinx.__version__, 'parallel_read_safe': True}


 class testcls:
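setup() now returns a metadata dict instead of a bare version string; 'parallel_read_safe' declares that the extension can stay active while sources are read in parallel. A hedged sketch of a third-party extension following the same convention (module and option names are made up)::

    # myextension.py -- hypothetical extension
    __version__ = '0.1'

    def setup(app):
        app.add_config_value('myextension_option', None, 'env')
        return {
            'version': __version__,
            'parallel_read_safe': True,   # safe to use during parallel source reading
        }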
@@ -49,7 +49,7 @@
 resolved to a Python object, and otherwise it becomes simple emphasis.
 This can be used as the default role to make links 'smart'.

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

@@ -58,6 +58,7 @@ import re
 import sys
 import inspect
 import posixpath
+from types import ModuleType

 from six import text_type
 from docutils.parsers.rst import directives
@@ -68,6 +69,7 @@ import sphinx
 from sphinx import addnodes
 from sphinx.util.compat import Directive
 from sphinx.pycode import ModuleAnalyzer, PycodeError
+from sphinx.ext.autodoc import Options


 # -- autosummary_toc node ------------------------------------------------------
@@ -130,7 +132,7 @@ def autosummary_table_visit_html(self, node):

 class FakeDirective:
 env = {}
-genopt = {}
+genopt = Options()

 def get_documenter(obj, parent):
 """Get an autodoc.Documenter class suitable for documenting the given
@@ -193,7 +195,7 @@ class Autosummary(Directive):

 def run(self):
 self.env = env = self.state.document.settings.env
-self.genopt = {}
+self.genopt = Options()
 self.warnings = []
 self.result = ViewList()

@@ -253,7 +255,7 @@ class Autosummary(Directive):

 self.result = ViewList() # initialize for each documenter
 full_name = real_name
-if full_name.startswith(modname + '.'):
+if not isinstance(obj, ModuleType):
 # give explicitly separated module name, so that members
 # of inner classes can be documented
 full_name = modname + '::' + full_name[len(modname)+1:]
@@ -268,6 +270,8 @@ class Autosummary(Directive):
 self.warn('failed to import object %s' % real_name)
 items.append((display_name, '', '', real_name))
 continue
+if not documenter.check_module():
+continue

 # try to also get a source code analyzer for attribute docs
 try:
@@ -432,11 +436,11 @@ def get_import_prefixes_from_env(env):
 """
 prefixes = [None]

-currmodule = env.temp_data.get('py:module')
+currmodule = env.ref_context.get('py:module')
 if currmodule:
 prefixes.insert(0, currmodule)

-currclass = env.temp_data.get('py:class')
+currclass = env.ref_context.get('py:class')
 if currclass:
 if currmodule:
 prefixes.insert(0, currmodule + "." + currclass)
@@ -570,4 +574,4 @@ def setup(app):
 app.connect('doctree-read', process_autosummary_toc)
 app.connect('builder-inited', process_generate_options)
 app.add_config_value('autosummary_generate', [], True)
-return sphinx.__version__
+return {'version': sphinx.__version__, 'parallel_read_safe': True}
@@ -14,7 +14,7 @@
 generate:
 sphinx-autogen -o source/generated source/*.rst

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """
 from __future__ import print_function
@@ -6,7 +6,7 @@
 Check Python modules and C API for coverage. Mostly written by Josip
 Dzolonga for the Google Highly Open Participation contest.

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

@@ -84,7 +84,7 @@ class CoverageBuilder(Builder):
 # Fetch all the info from the header files
 c_objects = self.env.domaindata['c']['objects']
 for filename in self.c_sourcefiles:
-undoc = []
+undoc = set()
 f = open(filename, 'r')
 try:
 for line in f:
@@ -97,7 +97,7 @@ class CoverageBuilder(Builder):
 if exp.match(name):
 break
 else:
-undoc.append((key, name))
+undoc.add((key, name))
 continue
 finally:
 f.close()
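Collecting the undocumented C items in a set removes duplicates when the same name is matched more than once; the writer side (next hunk) re-sorts the set so the report stays deterministic. The effect, sketched on plain data::

    undoc = set()
    for key, name in [('function', 'foo'), ('function', 'foo'), ('macro', 'BAR')]:
        undoc.add((key, name))          # the duplicate 'foo' entry collapses
    for typ, name in sorted(undoc):     # stable order for the written report
        print(' * %-50s [%9s]' % (name, typ))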
@@ -114,7 +114,7 @@ class CoverageBuilder(Builder):

 for filename, undoc in iteritems(self.c_undoc):
 write_header(op, filename)
-for typ, name in undoc:
+for typ, name in sorted(undoc):
 op.write(' * %-50s [%9s]\n' % (name, typ))
 op.write('\n')
 finally:
@@ -265,4 +265,4 @@ def setup(app):
 app.add_config_value('coverage_ignore_c_items', {}, False)
 app.add_config_value('coverage_write_headline', True, False)
 app.add_config_value('coverage_skip_undoc_in_source', False, False)
-return sphinx.__version__
+return {'version': sphinx.__version__, 'parallel_read_safe': True}
@@ -6,7 +6,7 @@
 Mimic doctest by automatically executing code snippets and checking
 their results.

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

@@ -32,6 +32,7 @@ from sphinx.util.console import bold
 blankline_re = re.compile(r'^\s*<BLANKLINE>', re.MULTILINE)
 doctestopt_re = re.compile(r'#\s*doctest:.+$', re.MULTILINE)

+
 # set up the necessary directives

 class TestDirective(Directive):
@@ -79,30 +80,35 @@ class TestDirective(Directive):
 option_strings = self.options['options'].replace(',', ' ').split()
 for option in option_strings:
 if (option[0] not in '+-' or option[1:] not in
 doctest.OPTIONFLAGS_BY_NAME):
 # XXX warn?
 continue
 flag = doctest.OPTIONFLAGS_BY_NAME[option[1:]]
 node['options'][flag] = (option[0] == '+')
 return [node]

+
 class TestsetupDirective(TestDirective):
 option_spec = {}

+
 class TestcleanupDirective(TestDirective):
 option_spec = {}

+
 class DoctestDirective(TestDirective):
 option_spec = {
 'hide': directives.flag,
 'options': directives.unchanged,
 }

+
 class TestcodeDirective(TestDirective):
 option_spec = {
 'hide': directives.flag,
 }

+
 class TestoutputDirective(TestDirective):
 option_spec = {
 'hide': directives.flag,
@@ -112,6 +118,7 @@ class TestoutputDirective(TestDirective):

 parser = doctest.DocTestParser()

+
 # helper classes

 class TestGroup(object):
@@ -196,7 +203,7 @@ class DocTestBuilder(Builder):
 def init(self):
 # default options
 self.opt = doctest.DONT_ACCEPT_TRUE_FOR_1 | doctest.ELLIPSIS | \
 doctest.IGNORE_EXCEPTION_DETAIL

 # HACK HACK HACK
 # doctest compiles its snippets with type 'single'. That is nice
@@ -247,6 +254,10 @@ Results of doctest builder run on %s
 # write executive summary
 def s(v):
 return v != 1 and 's' or ''
+repl = (self.total_tries, s(self.total_tries),
+self.total_failures, s(self.total_failures),
+self.setup_failures, s(self.setup_failures),
+self.cleanup_failures, s(self.cleanup_failures))
 self._out('''
 Doctest summary
 ===============
@@ -254,10 +265,7 @@ Doctest summary
 %5d failure%s in tests
 %5d failure%s in setup code
 %5d failure%s in cleanup code
-''' % (self.total_tries, s(self.total_tries),
-self.total_failures, s(self.total_failures),
-self.setup_failures, s(self.setup_failures),
-self.cleanup_failures, s(self.cleanup_failures)))
+''' % repl)
 self.outfile.close()

 if self.total_failures or self.setup_failures or self.cleanup_failures:
@@ -290,11 +298,11 @@ Doctest summary
 def condition(node):
 return (isinstance(node, (nodes.literal_block, nodes.comment))
 and 'testnodetype' in node) or \
 isinstance(node, nodes.doctest_block)
 else:
 def condition(node):
 return isinstance(node, (nodes.literal_block, nodes.comment)) \
 and 'testnodetype' in node
 for node in doctree.traverse(condition):
 source = 'test' in node and node['test'] or node.astext()
 if not source:
@@ -364,7 +372,7 @@ Doctest summary
 filename, 0, None)
 sim_doctest.globs = ns
 old_f = runner.failures
-self.type = 'exec' # the snippet may contain multiple statements
+self.type = 'exec'  # the snippet may contain multiple statements
 runner.run(sim_doctest, out=self._warn_out, clear_globs=False)
 if runner.failures > old_f:
 return False
@@ -394,7 +402,7 @@ Doctest summary
 new_opt = code[0].options.copy()
 new_opt.update(example.options)
 example.options = new_opt
-self.type = 'single' # as for ordinary doctests
+self.type = 'single'  # as for ordinary doctests
 else:
 # testcode and output separate
 output = code[1] and code[1].code or ''
@@ -413,7 +421,7 @@ Doctest summary
 options=options)
 test = doctest.DocTest([example], {}, group.name,
 filename, code[0].lineno, None)
-self.type = 'exec' # multiple statements again
+self.type = 'exec'  # multiple statements again
 # DocTest.__init__ copies the globs namespace, which we don't want
 test.globs = ns
 # also don't clear the globs namespace after running the doctest
@@ -435,4 +443,4 @@ def setup(app):
 app.add_config_value('doctest_test_doctest_blocks', 'default', False)
 app.add_config_value('doctest_global_setup', '', False)
 app.add_config_value('doctest_global_cleanup', '', False)
-return sphinx.__version__
+return {'version': sphinx.__version__, 'parallel_read_safe': True}
@@ -20,7 +20,7 @@

 You can also give an explicit caption, e.g. :exmpl:`Foo <foo>`.

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

@@ -59,4 +59,4 @@ def setup_link_roles(app):
 def setup(app):
 app.add_config_value('extlinks', {}, 'env')
 app.connect('builder-inited', setup_link_roles)
-return sphinx.__version__
+return {'version': sphinx.__version__, 'parallel_read_safe': True}
@@ -6,7 +6,7 @@
 Allow graphviz-formatted graphs to be included in Sphinx-generated
 documents inline.

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

@@ -146,14 +146,8 @@ def render_dot(self, code, options, format, prefix='graphviz'):
 ).encode('utf-8')

 fname = '%s-%s.%s' % (prefix, sha1(hashkey).hexdigest(), format)
-if hasattr(self.builder, 'imgpath'):
-# HTML
-relfn = posixpath.join(self.builder.imgpath, fname)
-outfn = path.join(self.builder.outdir, '_images', fname)
-else:
-# LaTeX
-relfn = fname
-outfn = path.join(self.builder.outdir, fname)
+relfn = posixpath.join(self.builder.imgpath, fname)
+outfn = path.join(self.builder.outdir, self.builder.imagedir, fname)

 if path.isfile(outfn):
 return relfn, outfn
@@ -323,4 +317,4 @@ def setup(app):
 app.add_config_value('graphviz_dot', 'dot', 'html')
 app.add_config_value('graphviz_dot_args', [], 'html')
 app.add_config_value('graphviz_output_format', 'png', 'html')
-return sphinx.__version__
+return {'version': sphinx.__version__, 'parallel_read_safe': True}
@@ -16,7 +16,7 @@
 namespace of the project configuration (that is, all variables from
 ``conf.py`` are available.)

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

@@ -49,7 +49,8 @@ class IfConfig(Directive):


 def process_ifconfig_nodes(app, doctree, docname):
-ns = app.config.__dict__.copy()
+ns = {k: app.config[k] for k in app.config.values}
+ns.update(app.config.__dict__.copy())
 ns['builder'] = app.builder.name
 for node in doctree.traverse(ifconfig):
 try:
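The ifconfig namespace is now seeded from every registered config value (so untouched defaults are visible to the expression too) and only then overlaid with whatever was assigned explicitly. A rough stand-in for app.config showing the same two-step construction (the FakeConfig class and its values are hypothetical)::

    class FakeConfig(object):
        # minimal stand-in for app.config: a values registry plus item access
        values = {'language': (None, 'env'), 'today': ('', 'env')}

        def __getitem__(self, name):
            return self.values[name][0]

    config = FakeConfig()
    ns = {k: config[k] for k in config.values}   # defaults for every known value
    ns.update(config.__dict__.copy())            # plus explicitly assigned attributes
    ns['builder'] = 'html'
    print(ns)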
@@ -73,4 +74,4 @@ def setup(app):
 app.add_node(ifconfig)
 app.add_directive('ifconfig', IfConfig)
 app.connect('doctree-resolved', process_ifconfig_nodes)
-return sphinx.__version__
+return {'version': sphinx.__version__, 'parallel_read_safe': True}
@@ -32,20 +32,21 @@ r"""
 The graph is inserted as a PNG+image map into HTML and a PDF in
 LaTeX.

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

 import re
 import sys
 import inspect
-import __builtin__ as __builtin__ # as __builtin__ is for lib2to3 compatibility
 try:
 from hashlib import md5
 except ImportError:
 from md5 import md5

 from six import text_type
+from six.moves import builtins
+
 from docutils import nodes
 from docutils.parsers.rst import directives

@@ -147,10 +148,10 @@ class InheritanceGraph(object):
 displayed node names.
 """
 all_classes = {}
-builtins = vars(__builtin__).values()
+py_builtins = vars(builtins).values()

 def recurse(cls):
-if not show_builtins and cls in builtins:
+if not show_builtins and cls in py_builtins:
 return
 if not private_bases and cls.__name__.startswith('_'):
 return
@@ -174,7 +175,7 @@ class InheritanceGraph(object):
 baselist = []
 all_classes[cls] = (nodename, fullname, baselist, tooltip)
 for base in cls.__bases__:
-if not show_builtins and base in builtins:
+if not show_builtins and base in py_builtins:
 continue
 if not private_bases and base.__name__.startswith('_'):
 continue
@@ -185,7 +186,7 @@ class InheritanceGraph(object):
 for cls in classes:
 recurse(cls)

-return all_classes.values()
+return list(all_classes.values())

 def class_name(self, cls, parts=0):
 """Given a class object, return a fully-qualified name.
@@ -194,7 +195,7 @@ class InheritanceGraph(object):
 completely general.
 """
 module = cls.__module__
-if module == '__builtin__':
+if module in ('__builtin__', 'builtins'):
 fullname = cls.__name__
 else:
 fullname = '%s.%s' % (module, cls.__name__)
@@ -310,7 +311,7 @@ class InheritanceDiagram(Directive):
 # Create a graph starting with the list of classes
 try:
 graph = InheritanceGraph(
-class_names, env.temp_data.get('py:module'),
+class_names, env.ref_context.get('py:module'),
 parts=node['parts'],
 private_bases='private-bases' in self.options)
 except InheritanceException as err:
@@ -407,4 +408,4 @@ def setup(app):
 app.add_config_value('inheritance_graph_attrs', {}, False),
 app.add_config_value('inheritance_node_attrs', {}, False),
 app.add_config_value('inheritance_edge_attrs', {}, False),
-return sphinx.__version__
+return {'version': sphinx.__version__, 'parallel_read_safe': True}
@@ -20,7 +20,7 @@
 also be specified individually, e.g. if the docs should be buildable
 without Internet access.

-:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

@@ -222,15 +222,21 @@ def load_mappings(app):

 def missing_reference(app, env, node, contnode):
 """Attempt to resolve a missing reference via intersphinx references."""
-domain = node.get('refdomain')
-if not domain:
-# only objects in domains are in the inventory
-return
 target = node['reftarget']
-objtypes = env.domains[domain].objtypes_for_role(node['reftype'])
-if not objtypes:
-return
-objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes]
+if node['reftype'] == 'any':
+# we search anything!
+objtypes = ['%s:%s' % (domain.name, objtype)
+for domain in env.domains.values()
+for objtype in domain.object_types]
+else:
+domain = node.get('refdomain')
+if not domain:
+# only objects in domains are in the inventory
+return
+objtypes = env.domains[domain].objtypes_for_role(node['reftype'])
+if not objtypes:
+return
+objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes]
 to_try = [(env.intersphinx_inventory, target)]
 in_set = None
 if ':' in target:
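For the new 'any' reference type there is no single domain to consult, so the inventory keys are built across all domains and object types before falling back to the per-domain lookup. The key construction, sketched on toy data (the domain/objtype table is hypothetical)::

    domains = {'py': ['module', 'class', 'function'], 'std': ['label', 'term']}
    objtypes = ['%s:%s' % (domain, objtype)
                for domain, types in domains.items()
                for objtype in types]
    print(objtypes)   # ['py:module', 'py:class', 'py:function', 'std:label', 'std:term']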
@ -248,7 +254,7 @@ def missing_reference(app, env, node, contnode):
|
|||||||
# get correct path in case of subdirectories
|
# get correct path in case of subdirectories
|
||||||
uri = path.join(relative_path(node['refdoc'], env.srcdir), uri)
|
uri = path.join(relative_path(node['refdoc'], env.srcdir), uri)
|
||||||
newnode = nodes.reference('', '', internal=False, refuri=uri,
|
newnode = nodes.reference('', '', internal=False, refuri=uri,
|
||||||
reftitle=_('(in %s v%s)') % (proj, version))
|
reftitle=_('(in %s v%s)') % (proj, version))
|
||||||
if node.get('refexplicit'):
|
if node.get('refexplicit'):
|
||||||
# use whatever title was given
|
# use whatever title was given
|
||||||
newnode.append(contnode)
|
newnode.append(contnode)
|
||||||
@ -276,4 +282,4 @@ def setup(app):
|
|||||||
app.add_config_value('intersphinx_cache_limit', 5, False)
|
app.add_config_value('intersphinx_cache_limit', 5, False)
|
||||||
app.connect('missing-reference', missing_reference)
|
app.connect('missing-reference', missing_reference)
|
||||||
app.connect('builder-inited', load_mappings)
|
app.connect('builder-inited', load_mappings)
|
||||||
return sphinx.__version__
|
return {'version': sphinx.__version__, 'parallel_read_safe': True}
|
||||||
|
@ -6,7 +6,7 @@
     Set up everything for use of JSMath to display math in HTML
     via JavaScript.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -57,4 +57,4 @@ def setup(app):
     mathbase_setup(app, (html_visit_math, None), (html_visit_displaymath, None))
     app.add_config_value('jsmath_path', '', False)
     app.connect('builder-inited', builder_inited)
-    return sphinx.__version__
+    return {'version': sphinx.__version__, 'parallel_read_safe': True}
@ -5,7 +5,7 @@
 
     Add external links to module code in Python object descriptions.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -16,9 +16,11 @@ from sphinx import addnodes
 from sphinx.locale import _
 from sphinx.errors import SphinxError
 
+
 class LinkcodeError(SphinxError):
     category = "linkcode error"
 
+
 def doctree_read(app, doctree):
     env = app.builder.env
 

@ -68,7 +70,8 @@ def doctree_read(app, doctree):
                                     classes=['viewcode-link'])
             signode += onlynode
 
+
 def setup(app):
     app.connect('doctree-read', doctree_read)
     app.add_config_value('linkcode_resolve', None, '')
-    return sphinx.__version__
+    return {'version': sphinx.__version__, 'parallel_read_safe': True}
@ -5,7 +5,7 @@
 
     Set up math support in source files and LaTeX/text output.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@ -7,7 +7,7 @@
     Sphinx's HTML writer -- requires the MathJax JavaScript library on your
     webserver/computer.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -64,9 +64,9 @@ def setup(app):
     # more information for mathjax secure url is here:
     # http://docs.mathjax.org/en/latest/start.html#secure-access-to-the-cdn
     app.add_config_value('mathjax_path',
-                         'https://c328740.ssl.cf1.rackcdn.com/mathjax/latest/MathJax.js?'
+                         'https://cdn.mathjax.org/mathjax/latest/MathJax.js?'
                          'config=TeX-AMS-MML_HTMLorMML', False)
     app.add_config_value('mathjax_inline', [r'\(', r'\)'], 'html')
     app.add_config_value('mathjax_display', [r'\[', r'\]'], 'html')
     app.connect('builder-inited', builder_inited)
-    return sphinx.__version__
+    return {'version': sphinx.__version__, 'parallel_read_safe': True}
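Note: the hunk above only swaps the default CDN URL. Projects that must build without Internet access can instead point mathjax_path at a local copy. A conf.py sketch, assuming the files are shipped under the project's _static directory (the exact location is an illustrative assumption):

# conf.py sketch -- serve a locally installed MathJax instead of the CDN.
# mathjax_path is resolved relative to the HTML builder's _static output.
extensions = ['sphinx.ext.mathjax']
html_static_path = ['_static']
mathjax_path = 'MathJax/MathJax.js?config=TeX-AMS-MML_HTMLorMML'
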
@ -5,7 +5,7 @@
 
     Support for NumPy and Google style docstrings.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -256,7 +256,7 @@ def setup(app):
 
     for name, (default, rebuild) in iteritems(Config._config_values):
         app.add_config_value(name, default, rebuild)
-    return sphinx.__version__
+    return {'version': sphinx.__version__, 'parallel_read_safe': True}
 
 
 def _process_docstring(app, what, name, obj, options, lines):
@ -7,7 +7,7 @@
     Classes for docstring parsing and formatting.
 
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -23,8 +23,9 @@ from sphinx.util.pycompat import UnicodeMixin
 
 
 _directive_regex = re.compile(r'\.\. \S+::')
-_google_untyped_arg_regex = re.compile(r'\s*(\w+)\s*:\s*(.*)')
-_google_typed_arg_regex = re.compile(r'\s*(\w+)\s*\(\s*(.+?)\s*\)\s*:\s*(.*)')
+_google_untyped_arg_regex = re.compile(r'\s*(\*?\*?\w+)\s*:\s*(.*)')
+_google_typed_arg_regex = re.compile(r'\s*(\*?\*?\w+)\s*\(\s*(.+?)\s*\)\s*:'
+                                     r'\s*(.*)')
 
 
 class GoogleDocstring(UnicodeMixin):

@ -90,6 +91,7 @@ class GoogleDocstring(UnicodeMixin):
         <BLANKLINE>
         :returns: Description of return value.
         :rtype: str
+        <BLANKLINE>
 
     """
     def __init__(self, docstring, config=None, app=None, what='', name='',

@ -215,6 +217,11 @@ class GoogleDocstring(UnicodeMixin):
         _name = match.group(1)
         _desc = match.group(2)
 
+        if _name[:2] == '**':
+            _name = r'\*\*'+_name[2:]
+        elif _name[:1] == '*':
+            _name = r'\*'+_name[1:]
+
         if prefer_type and not _type:
             _type, _name = _name, _type
         indent = self._get_indent(line) + 1

@ -254,6 +261,10 @@ class GoogleDocstring(UnicodeMixin):
         else:
             return []
 
+    def _consume_usage_section(self):
+        lines = self._dedent(self._consume_to_next_section())
+        return lines
+
     def _consume_section_header(self):
         section = next(self._line_iter)
         stripped_section = section.strip(':')

@ -291,11 +302,10 @@ class GoogleDocstring(UnicodeMixin):
         padding = ' ' * len(prefix)
         result_lines = []
         for i, line in enumerate(lines):
-            if line:
-                if i == 0:
-                    result_lines.append(prefix + line)
-                else:
-                    result_lines.append(padding + line)
+            if i == 0:
+                result_lines.append((prefix + line).rstrip())
+            elif line:
+                result_lines.append(padding + line)
             else:
                 result_lines.append('')
         return result_lines

@ -444,6 +454,13 @@ class GoogleDocstring(UnicodeMixin):
         use_admonition = self._config.napoleon_use_admonition_for_examples
         return self._parse_generic_section(section, use_admonition)
 
+    def _parse_usage_section(self, section):
+        header = ['.. rubric:: Usage:', '']
+        block = ['.. code-block:: python', '']
+        lines = self._consume_usage_section()
+        lines = self._indent(lines, 3)
+        return header + block + lines + ['']
+
     def _parse_generic_section(self, section, use_admonition):
         lines = self._strip_empty(self._consume_to_next_section())
         lines = self._dedent(lines)

@ -664,6 +681,7 @@ class NumpyDocstring(GoogleDocstring):
         <BLANKLINE>
         :returns: Description of return value.
         :rtype: str
+        <BLANKLINE>
 
     Methods
     -------
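Note: the widened regexes and the new usage-section helpers above let Google-style docstrings document starred parameters and a Usage block. An illustrative docstring follows; the function is hypothetical, and it assumes the new _parse_usage_section hook is wired to a "Usage" section heading:

def connect(url, *args, **kwargs):
    """Open a connection (illustrative example only).

    Args:
        url (str): Address to connect to.
        *args: Extra positional arguments; the widened regex now matches the
            leading star and escapes it so reST keeps it literal.
        **kwargs: Extra keyword options, handled the same way.

    Usage:
        conn = connect('example.org', timeout=3)
    """
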
@ -7,7 +7,7 @@
     A collection of helpful iterators.
 
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@ -5,7 +5,7 @@
 
     Render math in HTML via dvipng.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -14,7 +14,7 @@ import codecs
 import shutil
 import tempfile
 import posixpath
-from os import path, getcwd, chdir
+from os import path
 from subprocess import Popen, PIPE
 from hashlib import sha1
 

@ -24,7 +24,7 @@ from docutils import nodes
 import sphinx
 from sphinx.errors import SphinxError
 from sphinx.util.png import read_png_depth, write_png_depth
-from sphinx.util.osutil import ensuredir, ENOENT
+from sphinx.util.osutil import ensuredir, ENOENT, cd
 from sphinx.util.pycompat import sys_encoding
 from sphinx.ext.mathbase import setup_math as mathbase_setup, wrap_displaymath
 

@ -86,7 +86,7 @@ def render_math(self, math):
 
     shasum = "%s.png" % sha1(latex.encode('utf-8')).hexdigest()
     relfn = posixpath.join(self.builder.imgpath, 'math', shasum)
-    outfn = path.join(self.builder.outdir, '_images', 'math', shasum)
+    outfn = path.join(self.builder.outdir, self.builder.imagedir, 'math', shasum)
     if path.isfile(outfn):
         depth = read_png_depth(outfn)
         return relfn, depth

@ -116,10 +116,7 @@ def render_math(self, math):
     ltx_args.extend(self.builder.config.pngmath_latex_args)
     ltx_args.append('math.tex')
 
-    curdir = getcwd()
-    chdir(tempdir)
-
-    try:
+    with cd(tempdir):
         try:
             p = Popen(ltx_args, stdout=PIPE, stderr=PIPE)
         except OSError as err:

@ -130,8 +127,6 @@ def render_math(self, math):
                                   self.builder.config.pngmath_latex)
                 self.builder._mathpng_warned_latex = True
             return None, None
-    finally:
-        chdir(curdir)
 
     stdout, stderr = p.communicate()
     if p.returncode != 0:

@ -246,4 +241,4 @@ def setup(app):
     app.add_config_value('pngmath_latex_preamble', '', 'html')
     app.add_config_value('pngmath_add_tooltips', True, 'html')
     app.connect('build-finished', cleanup_tempdir)
-    return sphinx.__version__
+    return {'version': sphinx.__version__, 'parallel_read_safe': True}
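Note: the hunks above replace the manual getcwd()/chdir()/finally dance with the cd helper imported from sphinx.util.osutil. A sketch of what such a context manager does; the real implementation may differ in detail:

# Sketch of a chdir context manager with the behaviour the hunk relies on.
import os
from contextlib import contextmanager


@contextmanager
def cd(target_dir):
    cwd = os.getcwd()
    try:
        os.chdir(target_dir)
        yield
    finally:
        # Restore the previous working directory even if the body raises,
        # which is what the removed try/finally block did by hand.
        os.chdir(cwd)
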
@ -8,7 +8,7 @@
     all todos of your project and lists them along with a backlink to the
     original location.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -150,6 +150,14 @@ def purge_todos(app, env, docname):
                               if todo['docname'] != docname]
 
 
+def merge_info(app, env, docnames, other):
+    if not hasattr(other, 'todo_all_todos'):
+        return
+    if not hasattr(env, 'todo_all_todos'):
+        env.todo_all_todos = []
+    env.todo_all_todos.extend(other.todo_all_todos)
+
+
 def visit_todo_node(self, node):
     self.visit_admonition(node)
 

@ -172,4 +180,5 @@ def setup(app):
     app.connect('doctree-read', process_todos)
     app.connect('doctree-resolved', process_todo_nodes)
     app.connect('env-purge-doc', purge_todos)
-    return sphinx.__version__
+    app.connect('env-merge-info', merge_info)
+    return {'version': sphinx.__version__, 'parallel_read_safe': True}
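Note: the hunks above show the full pattern an extension needs for parallel reads: keep per-document data on the build environment, drop it via env-purge-doc, and fold worker results back in with the new env-merge-info event. A generic sketch of the same shape, with illustrative attribute names that are not part of the patch:

# Generic sketch of the purge/merge pattern the todo extension adopts above.
def purge_items(app, env, docname):
    if not hasattr(env, 'my_collected_items'):
        return
    env.my_collected_items = [item for item in env.my_collected_items
                              if item['docname'] != docname]


def merge_items(app, env, docnames, other):
    # 'other' is the environment of a worker process; copy its data over.
    if not hasattr(other, 'my_collected_items'):
        return
    if not hasattr(env, 'my_collected_items'):
        env.my_collected_items = []
    env.my_collected_items.extend(other.my_collected_items)


def setup(app):
    app.connect('env-purge-doc', purge_items)
    app.connect('env-merge-info', merge_items)
    return {'version': '0.1', 'parallel_read_safe': True}
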
@ -5,7 +5,7 @@
 
     Add links to module code in Python object descriptions.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -20,6 +20,7 @@ from sphinx.locale import _
 from sphinx.pycode import ModuleAnalyzer
 from sphinx.util import get_full_modname
 from sphinx.util.nodes import make_refnode
+from sphinx.util.console import blue
 
 
 def _get_full_modname(app, modname, attribute):

@ -37,7 +38,7 @@ def _get_full_modname(app, modname, attribute):
         # It should be displayed only verbose mode.
         app.verbose(traceback.format_exc().rstrip())
         app.verbose('viewcode can\'t import %s, failed with error "%s"' %
                     (modname, e))
         return None
 
 

@ -100,6 +101,16 @@ def doctree_read(app, doctree):
             signode += onlynode
 
 
+def env_merge_info(app, env, docnames, other):
+    if not hasattr(other, '_viewcode_modules'):
+        return
+    # create a _viewcode_modules dict on the main environment
+    if not hasattr(env, '_viewcode_modules'):
+        env._viewcode_modules = {}
+    # now merge in the information from the subprocess
+    env._viewcode_modules.update(other._viewcode_modules)
+
+
 def missing_reference(app, env, node, contnode):
     # resolve our "viewcode" reference nodes -- they need special treatment
     if node['reftype'] == 'viewcode':

@ -116,10 +127,12 @@ def collect_pages(app):
 
     modnames = set(env._viewcode_modules)
 
-    app.builder.info(' (%d module code pages)' %
-                     len(env._viewcode_modules), nonl=1)
+    # app.builder.info(' (%d module code pages)' %
+    #                  len(env._viewcode_modules), nonl=1)
 
-    for modname, entry in iteritems(env._viewcode_modules):
+    for modname, entry in app.status_iterator(
+            iteritems(env._viewcode_modules), 'highlighting module code... ',
+            blue, len(env._viewcode_modules), lambda x: x[0]):
         if not entry:
             continue
         code, tags, used, refname = entry

@ -162,15 +175,14 @@ def collect_pages(app):
         context = {
             'parents': parents,
             'title': modname,
-            'body': _('<h1>Source code for %s</h1>') % modname + \
-                '\n'.join(lines)
+            'body': (_('<h1>Source code for %s</h1>') % modname +
+                     '\n'.join(lines)),
         }
         yield (pagename, context, 'page.html')
 
     if not modnames:
         return
 
-    app.builder.info(' _modules/index', nonl=True)
     html = ['\n']
     # the stack logic is needed for using nested lists for submodules
     stack = ['']

@ -190,8 +202,8 @@ def collect_pages(app):
     html.append('</ul>' * (len(stack) - 1))
     context = {
         'title': _('Overview: module code'),
-        'body': _('<h1>All modules for which code is available</h1>') + \
-            ''.join(html),
+        'body': (_('<h1>All modules for which code is available</h1>') +
+                 ''.join(html)),
     }
 
     yield ('_modules/index', context, 'page.html')

@ -200,8 +212,9 @@ def collect_pages(app):
 def setup(app):
     app.add_config_value('viewcode_import', True, False)
     app.connect('doctree-read', doctree_read)
+    app.connect('env-merge-info', env_merge_info)
     app.connect('html-collect-pages', collect_pages)
     app.connect('missing-reference', missing_reference)
-    #app.add_config_value('viewcode_include_modules', [], 'env')
-    #app.add_config_value('viewcode_exclude_modules', [], 'env')
-    return sphinx.__version__
+    # app.add_config_value('viewcode_include_modules', [], 'env')
+    # app.add_config_value('viewcode_exclude_modules', [], 'env')
+    return {'version': sphinx.__version__, 'parallel_read_safe': True}
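Note: besides the merge hook, the hunks above switch the highlighting loop to app.status_iterator, which prints progress as it iterates. As the call in the diff shows, it takes the iterable, a status message, a colour function, the total count, and a function that turns each item into the displayed string. A condensed sketch of the same loop shape, with illustrative data:

# Condensed sketch of the status_iterator loop used above.
from sphinx.util.console import blue


def highlight_modules(app, viewcode_modules):
    for modname, entry in app.status_iterator(
            viewcode_modules.items(), 'highlighting module code... ',
            blue, len(viewcode_modules), lambda x: x[0]):
        if not entry:
            continue
        # ... generate the highlighted page for modname here ...
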
@ -5,7 +5,7 @@
 
     Highlight code blocks using Pygments.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 

@ -24,46 +24,32 @@ from sphinx.util.pycompat import htmlescape
 from sphinx.util.texescape import tex_hl_escape_map_new
 from sphinx.ext import doctest
 
-try:
-    import pygments
-    from pygments import highlight
-    from pygments.lexers import PythonLexer, PythonConsoleLexer, CLexer, \
-        TextLexer, RstLexer
-    from pygments.lexers import get_lexer_by_name, guess_lexer
-    from pygments.formatters import HtmlFormatter, LatexFormatter
-    from pygments.filters import ErrorToken
-    from pygments.styles import get_style_by_name
-    from pygments.util import ClassNotFound
-    from sphinx.pygments_styles import SphinxStyle, NoneStyle
-except ImportError:
-    pygments = None
-    lexers = None
-    HtmlFormatter = LatexFormatter = None
-else:
+from pygments import highlight
+from pygments.lexers import PythonLexer, PythonConsoleLexer, CLexer, \
+    TextLexer, RstLexer
+from pygments.lexers import get_lexer_by_name, guess_lexer
+from pygments.formatters import HtmlFormatter, LatexFormatter
+from pygments.filters import ErrorToken
+from pygments.styles import get_style_by_name
+from pygments.util import ClassNotFound
+from sphinx.pygments_styles import SphinxStyle, NoneStyle
 
-    lexers = dict(
-        none = TextLexer(),
-        python = PythonLexer(),
-        pycon = PythonConsoleLexer(),
-        pycon3 = PythonConsoleLexer(python3=True),
-        rest = RstLexer(),
-        c = CLexer(),
-    )
-    for _lexer in lexers.values():
-        _lexer.add_filter('raiseonerror')
+lexers = dict(
+    none = TextLexer(),
+    python = PythonLexer(),
+    pycon = PythonConsoleLexer(),
+    pycon3 = PythonConsoleLexer(python3=True),
+    rest = RstLexer(),
+    c = CLexer(),
+)
+for _lexer in lexers.values():
+    _lexer.add_filter('raiseonerror')
 
 
 escape_hl_chars = {ord(u'\\'): u'\\PYGZbs{}',
                    ord(u'{'): u'\\PYGZob{}',
                    ord(u'}'): u'\\PYGZcb{}'}
 
-# used if Pygments is not available
-_LATEX_STYLES = r'''
-\newcommand\PYGZbs{\char`\\}
-\newcommand\PYGZob{\char`\{}
-\newcommand\PYGZcb{\char`\}}
-'''
-
 # used if Pygments is available
 # use textcomp quote to get a true single quote
 _LATEX_ADD_STYLES = r'''

@ -80,8 +66,6 @@ class PygmentsBridge(object):
     def __init__(self, dest='html', stylename='sphinx',
                  trim_doctest_flags=False):
         self.dest = dest
-        if not pygments:
-            return
         if stylename is None or stylename == 'sphinx':
             style = SphinxStyle
         elif stylename == 'none':

@ -150,11 +134,9 @@ class PygmentsBridge(object):
         else:
             return True
 
-    def highlight_block(self, source, lang, warn=None, force=False, **kwargs):
+    def highlight_block(self, source, lang, opts=None, warn=None, force=False, **kwargs):
         if not isinstance(source, text_type):
             source = source.decode()
-        if not pygments:
-            return self.unhighlighted(source)
 
         # find out which lexer to use
         if lang in ('py', 'python'):

@ -182,7 +164,7 @@ class PygmentsBridge(object):
             lexer = lexers[lang]
         else:
             try:
-                lexer = lexers[lang] = get_lexer_by_name(lang)
+                lexer = lexers[lang] = get_lexer_by_name(lang, **opts or {})
             except ClassNotFound:
                 if warn:
                     warn('Pygments lexer name %r is not known' % lang)

@ -213,11 +195,6 @@ class PygmentsBridge(object):
         return hlsource.translate(tex_hl_escape_map_new)
 
     def get_stylesheet(self):
-        if not pygments:
-            if self.dest == 'latex':
-                return _LATEX_STYLES
-            # no HTML styles needed
-            return ''
         formatter = self.get_formatter()
         if self.dest == 'html':
             return formatter.get_style_defs('.highlight')
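Note: the new opts argument on highlight_block is forwarded straight into get_lexer_by_name, so lexer options can now reach Pygments. A usage sketch; the startinline option for PHP snippets is simply one example of a Pygments lexer option:

# Sketch of passing lexer options through the new opts parameter.
from sphinx.highlighting import PygmentsBridge

bridge = PygmentsBridge(dest='html')
html = bridge.highlight_block("echo 'hello';", 'php',
                              opts={'startinline': True})
print(html)
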
@ -5,7 +5,7 @@
 
     Glue code for the jinja2 templating engine.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
@ -5,7 +5,7 @@
 
     Locale utilities.
 
-    :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+    :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
Some files were not shown because too many files have changed in this diff.