Mirror of https://github.com/sphinx-doc/sphinx.git

Commit b070e5be52: Merge branch '2.0' into 6738_new_escape_for_unicode_latex_engine
AUTHORS (3 lines changed)

@@ -25,6 +25,7 @@ Other contributors, listed alphabetically, are:
 * Henrique Bastos -- SVG support for graphviz extension
 * Daniel Bültmann -- todo extension
 * Marco Buttu -- doctest extension (pyversion option)
+* Nathan Damon -- bugfix in validation of static paths in html builders
 * Etienne Desautels -- apidoc module
 * Michael Droettboom -- inheritance_diagram extension
 * Charles Duffy -- original graphviz extension
@@ -35,7 +36,7 @@ Other contributors, listed alphabetically, are:
 * Hernan Grecco -- search improvements
 * Horst Gutmann -- internationalization support
 * Martin Hans -- autodoc improvements
-* Zac Hatfield-Dodds -- doctest reporting improvements
+* Zac Hatfield-Dodds -- doctest reporting improvements, intersphinx performance
 * Doug Hellmann -- graphviz improvements
 * Tim Hoffmann -- theme improvements
 * Antti Kaihola -- doctest extension (skipif option)
CHANGES (38 lines changed)

@@ -7,9 +7,15 @@ Dependencies
 Incompatible changes
 --------------------

+* #6742: ``end-before`` option of :rst:dir:`literalinclude` directive does not
+  match the first line of the code block.
+* #1331: Change default User-Agent header to ``"Sphinx/X.Y.Z requests/X.Y.Z
+  python/X.Y.Z"``. It can be changed via :confval:`user_agent`.

 Deprecated
 ----------

+* ``sphinx.builders.gettext.POHEADER``
 * ``sphinx.io.SphinxStandaloneReader.app``
 * ``sphinx.io.SphinxStandaloneReader.env``

@@ -19,6 +25,14 @@ Features added
 * #6707: C++, support bit-fields.
 * #267: html: Eliminate prompt characters of doctest block from copyable text
 * #6729: html theme: agogo theme now supports ``rightsidebar`` option
+* #6780: Add PEP-561 Support
+* #6762: latex: Allow to load additonal LaTeX packages via ``extrapackages`` key
+  of :confval:`latex_elements`
+* #1331: Add new config variable: :confval:`user_agent`
+* #6000: LaTeX: have backslash also be an inline literal word wrap break
+  character
+* #6812: Improve a warning message when extensions are not parallel safe
+* #6818: Improve Intersphinx performance for multiple remote inventories.

 Bugs fixed
 ----------
@@ -34,14 +48,22 @@ Bugs fixed
 * #6655: image URLs containing ``data:`` causes gettext builder crashed
 * #6584: i18n: Error when compiling message catalogs on Hindi
 * #6718: i18n: KeyError is raised if section title and table title are same
+* #6743: i18n: :confval:`rst_prolog` breaks the translation
 * #6708: mathbase: Some deprecated functions have removed
 * #6709: autodoc: mock object does not work as a class decorator
+* #5070: epub: Wrong internal href fragment links
 * #6712: Allow not to install sphinx.testing as runtime (mainly for ALT Linux)
+* #6741: html: search result was broken with empty :confval:`html_file_suffix`
+* #6001: LaTeX does not wrap long code lines at backslash character
+* #6804: LaTeX: PDF build breaks if admonition of danger type contains
+  code-block long enough not to fit on one page
+* #6809: LaTeX: code-block in a danger type admonition can easily spill over
+  bottom of page

 Testing
 --------

-Release 2.2.1 (in development)
+Release 2.2.2 (in development)
 ==============================

 Dependencies
@@ -59,12 +81,22 @@ Features added
 Bugs fixed
 ----------

-* #6641: LaTeX: Undefined control sequence ``\sphinxmaketitle``
-* #6710: LaTeX not well configured for Greek language as main language
+* #6776: LaTeX: 2019-10-01 LaTeX release breaks :file:`sphinxcyrillic.sty`

 Testing
 --------

+Release 2.2.1 (released Oct 26, 2019)
+=====================================
+
+Bugs fixed
+----------
+
+* #6641: LaTeX: Undefined control sequence ``\sphinxmaketitle``
+* #6710: LaTeX not well configured for Greek language as main language
+* #6759: validation of html static paths and extra paths no longer throws
+  an error if the paths are in different directories

 Release 2.2.0 (released Aug 19, 2019)
 =====================================
doc/_themes/sphinx13/static/sphinx13.css (vendored, 5 lines changed)

@@ -299,6 +299,11 @@ a.headerlink:hover {
     color: white!important;
 }

+/* avoid font-size when :mod: role in headings */
+h1 code, h2 code, h3 code, h4 code {
+    font-size: inherit;
+}
+
 cite, code, tt {
     font-family: 'Consolas', 'DejaVu Sans Mono',
                  'Bitstream Vera Sans Mono', monospace;
@@ -26,6 +26,11 @@ The following is a list of deprecated interfaces.
      - (will be) Removed
      - Alternatives

+   * - ``sphinx.builders.gettext.POHEADER``
+     - 2.3
+     - 4.0
+     - ``sphinx/templates/gettext/message.pot_t`` (template file)
+
    * - ``sphinx.io.SphinxStandaloneReader.app``
      - 2.3
      - 4.0
@@ -55,9 +55,9 @@ See the :ref:`pertinent section in the FAQ list <usingwith>`.
 Prerequisites
 -------------

-Sphinx needs at least **Python 3.5** to run, as well as the docutils_ and
-Jinja2_ libraries. Sphinx should work with docutils version 0.12 or some (not
-broken) SVN trunk snapshot.
+Sphinx needs at least **Python 3.5** to run.
+It also depends on 3rd party libraries such as docutils_ and jinja2_, but they
+are automatically installed when sphinx is installed.

 .. _reStructuredText: http://docutils.sourceforge.net/rst.html
 .. _docutils: http://docutils.sourceforge.net/
@@ -226,6 +226,25 @@ into the generated ``.tex`` files. Its ``'sphinxsetup'`` key is described

    .. versionadded:: 1.5

+``'extrapackages'``
+   Additional LaTeX packages. For example:
+
+   .. code-block:: python
+
+      latex_elements = {
+          'packages': r'\usepackage{isodate}'
+      }
+
+   It defaults to empty.
+
+   The specified LaTeX packages will be loaded before
+   hyperref package and packages loaded from Sphinx extensions.
+
+   .. hint:: If you'd like to load additional LaTeX packages after hyperref, use
+      ``'preamble'`` key instead.
+
+   .. versionadded:: 2.3
+
 ``'footer'``
    Additional footer content (before the indices), default empty.

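One note on the example quoted in this hunk: the key shown inside ``latex_elements`` is ``'packages'``, while the option being documented (and the one exercised by the new test near the end of this commit) is ``'extrapackages'``. A minimal conf.py sketch using the documented key name, assuming that spelling is the intended one:

    # conf.py -- minimal sketch; key name assumed to be 'extrapackages' as documented
    latex_elements = {
        # loaded before hyperref and before packages added by Sphinx extensions
        'extrapackages': r'\usepackage{isodate}',
        # anything that must load after hyperref still belongs in 'preamble'
        'preamble': r'\usepackage{etoolbox}',
    }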
@@ -600,12 +619,15 @@ macros may be significant.
    default ``true``. Allows linebreaks inside inline literals: but extra
    potential break-points (additionally to those allowed by LaTeX at spaces
    or for hyphenation) are currently inserted only after the characters
-   ``. , ; ? ! /``. Due to TeX internals, white space in the line will be
-   stretched (or shrunk) in order to accomodate the linebreak.
+   ``. , ; ? ! /`` and ``\``. Due to TeX internals, white space in the line
+   will be stretched (or shrunk) in order to accomodate the linebreak.

    .. versionadded:: 1.5
       set this option value to ``false`` to recover former behaviour.

+   .. versionchanged:: 2.3.0
+      added potential breakpoint at ``\`` characters.
+
 ``verbatimvisiblespace``
    default ``\textcolor{red}{\textvisiblespace}``. When a long code line is
    split, the last space character from the source code line right before the
@@ -510,6 +510,14 @@ General configuration

    .. versionadded:: 1.6.6

+.. confval:: user_agent
+
+   A User-Agent of Sphinx. It is used for a header on HTTP access (ex.
+   linkcheck, intersphinx and so on). Default is ``"Sphinx/X.Y.Z
+   requests/X.Y.Z python/X.Y.Z"``.
+
+   .. versionadded:: 2.3
+
 .. confval:: tls_verify

    If true, Sphinx verifies server certifications. Default is ``True``.
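Based on the new ``user_agent`` option documented above, a project can replace the default header that linkcheck and intersphinx send. A minimal conf.py sketch (the value here is a made-up example):

    # conf.py -- hypothetical override of the default User-Agent used for HTTP access
    user_agent = "myproject-docs/1.0 (+https://example.org/docs)"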
setup.py (7 lines changed)

@@ -176,6 +176,10 @@ setup(
     description='Python documentation generator',
     long_description=long_desc,
     long_description_content_type='text/x-rst',
+    project_urls={
+        "Code": "https://github.com/sphinx-doc/sphinx",
+        "Issue tracker": "https://github.com/sphinx-doc/sphinx/issues",
+    },
     zip_safe=False,
     classifiers=[
         'Development Status :: 5 - Production/Stable',
@@ -216,6 +220,9 @@ setup(
     ],
     platforms='any',
     packages=find_packages(exclude=['tests', 'utils']),
+    package_data = {
+        'sphinx': ['py.typed'],
+    },
     include_package_data=True,
     entry_points={
         'console_scripts': [
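For context on the ``package_data`` / ``py.typed`` addition above (PEP 561 support): shipping the marker file lets type checkers resolve Sphinx's own annotations in downstream code. A small sketch of code that benefits, with a hypothetical extension module name:

    # my_ext.py -- hypothetical downstream module; with py.typed installed,
    # mypy resolves Sphinx's annotations instead of treating them as Any.
    from sphinx.application import Sphinx

    def setup(app: Sphinx) -> None:
        app.add_config_value('my_ext_option', default=None, rebuild='env')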
@@ -1192,26 +1192,30 @@ class Sphinx:
         """
         if typ == 'read':
             attrname = 'parallel_read_safe'
-            message = __("the %s extension does not declare if it is safe "
-                         "for parallel reading, assuming it isn't - please "
-                         "ask the extension author to check and make it "
-                         "explicit")
+            message_not_declared = __("the %s extension does not declare if it "
+                                      "is safe for parallel reading, assuming "
+                                      "it isn't - please ask the extension author "
+                                      "to check and make it explicit")
+            message_not_safe = __("the %s extension is not safe for parallel reading")
         elif typ == 'write':
             attrname = 'parallel_write_safe'
-            message = __("the %s extension does not declare if it is safe "
-                         "for parallel writing, assuming it isn't - please "
-                         "ask the extension author to check and make it "
-                         "explicit")
+            message_not_declared = __("the %s extension does not declare if it "
+                                      "is safe for parallel writing, assuming "
+                                      "it isn't - please ask the extension author "
+                                      "to check and make it explicit")
+            message_not_safe = __("the %s extension is not safe for parallel writing")
         else:
             raise ValueError('parallel type %s is not supported' % typ)

         for ext in self.extensions.values():
             allowed = getattr(ext, attrname, None)
             if allowed is None:
-                logger.warning(message, ext.name)
+                logger.warning(message_not_declared, ext.name)
                 logger.warning(__('doing serial %s'), typ)
                 return False
             elif not allowed:
+                logger.warning(message_not_safe, ext.name)
+                logger.warning(__('doing serial %s'), typ)
                 return False

         return True
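For context on the reworked warnings above: ``is_parallel_allowed()`` looks for ``parallel_read_safe`` / ``parallel_write_safe`` attributes on each loaded extension, which are normally filled from the metadata dict returned by the extension's ``setup()``. A minimal sketch of an extension that declares both (name and version are placeholders):

    # my_extension.py -- hypothetical extension declaring parallel safety
    def setup(app):
        return {
            'version': '0.1',
            'parallel_read_safe': True,   # becomes the 'parallel_read_safe' attribute
            'parallel_write_safe': True,  # becomes the 'parallel_write_safe' attribute
        }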
@@ -272,7 +272,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
                 if ':' in node_id:
                     target['ids'][i] = self.fix_fragment('', node_id)

-            next_node = target.next_node(siblings=True)  # type: Node
+            next_node = target.next_node(ascend=True)  # type: Node
             if isinstance(next_node, nodes.Element):
                 for i, node_id in enumerate(next_node['ids']):
                     if ':' in node_id:
@@ -9,7 +9,7 @@
 """

 from os import path
-from typing import Any, Dict, Set
+from typing import Any, Dict

 from sphinx.application import Sphinx
 from sphinx.builders.html import StandaloneHTMLBuilder
@@ -45,10 +45,6 @@ class DirectoryHTMLBuilder(StandaloneHTMLBuilder):

         return outfilename

-    def prepare_writing(self, docnames: Set[str]) -> None:
-        super().prepare_writing(docnames)
-        self.globalcontext['no_search_suffix'] = True
-

 # for compatibility
 deprecated_alias('sphinx.builders.html',
@@ -11,16 +11,16 @@
 from codecs import open
 from collections import defaultdict, OrderedDict
 from datetime import datetime, tzinfo, timedelta
-from io import StringIO
 from os import path, walk, getenv
 from time import time
-from typing import Any, Dict, Iterable, List, Set, Tuple, Union
+from typing import Any, Dict, Iterable, Generator, List, Set, Tuple, Union
 from uuid import uuid4

 from docutils import nodes
 from docutils.nodes import Element

 from sphinx import addnodes
+from sphinx import package_dir
 from sphinx.application import Sphinx
 from sphinx.builders import Builder
 from sphinx.domains.python import pairindextypes
@@ -30,8 +30,9 @@ from sphinx.util import split_index_msg, logging, status_iterator
 from sphinx.util.console import bold  # type: ignore
 from sphinx.util.i18n import CatalogInfo, docname_to_domain
 from sphinx.util.nodes import extract_messages, traverse_translatable_index
-from sphinx.util.osutil import relpath, ensuredir, canon_path
+from sphinx.util.osutil import ensuredir, canon_path
 from sphinx.util.tags import Tags
+from sphinx.util.template import SphinxRenderer

 if False:
     # For type annotation
@@ -58,7 +59,15 @@ msgstr ""
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"

-"""[1:]
+"""[1:]  # RemovedInSphinx40Warning
+
+
+class Message:
+    """An entry of translatable message."""
+    def __init__(self, text: str, locations: List[Tuple[str, int]], uuids: List[str]):
+        self.text = text
+        self.locations = locations
+        self.uuids = uuids


 class Catalog:
@@ -80,6 +89,12 @@ class Catalog:
             self.metadata[msg] = []
         self.metadata[msg].append((origin.source, origin.line, origin.uid))  # type: ignore

+    def __iter__(self) -> Generator[Message, None, None]:
+        for message in self.messages:
+            positions = [(source, line) for source, line, uuid in self.metadata[message]]
+            uuids = [uuid for source, line, uuid in self.metadata[message]]
+            yield Message(message, positions, uuids)
+

 class MsgOrigin:
     """
@@ -92,6 +107,22 @@ class MsgOrigin:
         self.uid = uuid4().hex


+class GettextRenderer(SphinxRenderer):
+    def __init__(self, template_path: str = None) -> None:
+        if template_path is None:
+            template_path = path.join(package_dir, 'templates', 'gettext')
+        super().__init__(template_path)
+
+        def escape(s: str) -> str:
+            s = s.replace('\\', r'\\')
+            s = s.replace('"', r'\"')
+            return s.replace('\n', '\\n"\n"')
+
+        # use texescape as escape filter
+        self.env.filters['e'] = escape
+        self.env.filters['escape'] = escape
+
+
 class I18nTags(Tags):
     """Dummy tags module for I18nBuilder.
@@ -247,12 +278,13 @@ class MessageCatalogBuilder(I18nBuilder):

     def finish(self) -> None:
         super().finish()
-        data = {
+        context = {
             'version': self.config.version,
             'copyright': self.config.copyright,
             'project': self.config.project,
-            'ctime': datetime.fromtimestamp(
-                timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
+            'ctime': datetime.fromtimestamp(timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
+            'display_location': self.config.gettext_location,
+            'display_uuid': self.config.gettext_uuid,
         }
         for textdomain, catalog in status_iterator(self.catalogs.items(),
                                                    __("writing message catalogs... "),
@@ -262,30 +294,10 @@ class MessageCatalogBuilder(I18nBuilder):
             # noop if config.gettext_compact is set
             ensuredir(path.join(self.outdir, path.dirname(textdomain)))

+            context['messages'] = list(catalog)
+            content = GettextRenderer().render('message.pot_t', context)
+
             pofn = path.join(self.outdir, textdomain + '.pot')
-            output = StringIO()
-            output.write(POHEADER % data)
-
-            for message in catalog.messages:
-                positions = catalog.metadata[message]
-
-                if self.config.gettext_location:
-                    # generate "#: file1:line1\n#: file2:line2 ..."
-                    output.write("#: %s\n" % "\n#: ".join(
-                        "%s:%s" % (canon_path(relpath(source, self.outdir)), line)
-                        for source, line, _ in positions))
-                if self.config.gettext_uuid:
-                    # generate "# uuid1\n# uuid2\n ..."
-                    output.write("# %s\n" % "\n# ".join(uid for _, _, uid in positions))
-
-                # message contains *one* line of text ready for translation
-                message = message.replace('\\', r'\\'). \
-                    replace('"', r'\"'). \
-                    replace('\n', '\\n"\n"')
-                output.write('msgid "%s"\nmsgstr ""\n\n' % message)
-
-            content = output.getvalue()
-
             if should_write(pofn, content):
                 with open(pofn, 'w', encoding='utf-8') as pofile:
                     pofile.write(content)
@@ -1162,7 +1162,8 @@ def validate_html_extra_path(app: Sphinx, config: Config) -> None:
         if not path.exists(extra_path):
             logger.warning(__('html_extra_path entry %r does not exist'), entry)
             config.html_extra_path.remove(entry)
-        elif path.commonpath([app.outdir, extra_path]) == app.outdir:
+        elif (path.splitdrive(app.outdir)[0] == path.splitdrive(extra_path)[0] and
+              path.commonpath([app.outdir, extra_path]) == app.outdir):
             logger.warning(__('html_extra_path entry %r is placed inside outdir'), entry)
             config.html_extra_path.remove(entry)

@@ -1174,7 +1175,8 @@ def validate_html_static_path(app: Sphinx, config: Config) -> None:
         if not path.exists(static_path):
             logger.warning(__('html_static_path entry %r does not exist'), entry)
             config.html_static_path.remove(entry)
-        elif path.commonpath([app.outdir, static_path]) == app.outdir:
+        elif (path.splitdrive(app.outdir)[0] == path.splitdrive(static_path)[0] and
+              path.commonpath([app.outdir, static_path]) == app.outdir):
             logger.warning(__('html_static_path entry %r is placed inside outdir'), entry)
             config.html_static_path.remove(entry)
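The ``splitdrive()`` guard added in both validators works around ``os.path.commonpath()`` raising ``ValueError`` when its arguments live on different Windows drives. A standalone sketch of that failure mode (the paths are hypothetical):

    # ntpath is os.path on Windows, so this reproduces the error on any platform
    import ntpath

    try:
        ntpath.commonpath([r"C:\project\_build\html", r"D:\assets\static"])
    except ValueError as exc:
        # e.g. "Paths don't have the same drive"; comparing splitdrive()[0] first
        # lets the validators skip the commonpath() check in that case
        print(exc)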
@@ -101,7 +101,6 @@ class CheckExternalLinksBuilder(Builder):
             'allow_redirects': True,
             'headers': {
                 'Accept': 'text/html,application/xhtml+xml;q=0.9,*/*;q=0.8',
-                'User-Agent': requests.useragent_header[0][1],
             },
         }
         if self.app.config.linkcheck_timeout:
@@ -148,6 +148,7 @@ class Config:
         'math_numfig': (True, 'env', []),
         'tls_verify': (True, 'env', []),
         'tls_cacerts': (None, 'env', []),
+        'user_agent': (None, 'env', [str]),
         'smartquotes': (True, 'env', []),
         'smartquotes_action': ('qDe', 'env', []),
         'smartquotes_excludes': ({'languages': ['ja'],
@@ -348,7 +348,7 @@ class LiteralIncludeReader:
                 return lines[:lineno + 1]
             else:
                 if lineno == 0:
-                    return []
+                    pass  # end-before ignores first line
                 else:
                     return lines[:lineno]
         if inclusive is True:
@@ -23,6 +23,7 @@
 :license: BSD, see LICENSE for details.
 """

+import concurrent.futures
 import functools
 import posixpath
 import sys
@@ -187,21 +188,18 @@ def fetch_inventory(app: Sphinx, uri: str, inv: Any) -> Any:
     return invdata


-def load_mappings(app: Sphinx) -> None:
-    """Load all intersphinx mappings into the environment."""
-    now = int(time.time())
+def fetch_inventory_group(
+    name: str, uri: str, invs: Any, cache: Any, app: Any, now: float
+) -> bool:
     cache_time = now - app.config.intersphinx_cache_limit * 86400
-    inventories = InventoryAdapter(app.builder.env)
-    update = False
-    for key, (name, (uri, invs)) in app.config.intersphinx_mapping.items():
-        failures = []
+    failures = []
+    try:
         for inv in invs:
             if not inv:
                 inv = posixpath.join(uri, INVENTORY_FILENAME)
             # decide whether the inventory must be read: always read local
             # files; remote ones only if the cache time is expired
-            if '://' not in inv or uri not in inventories.cache \
-                    or inventories.cache[uri][1] < cache_time:
+            if '://' not in inv or uri not in cache or cache[uri][1] < cache_time:
                 safe_inv_url = _get_safe_url(inv)
                 logger.info(__('loading intersphinx inventory from %s...'), safe_inv_url)
                 try:
@@ -209,12 +207,11 @@ def load_mappings(app: Sphinx) -> None:
                 except Exception as err:
                     failures.append(err.args)
                     continue

                 if invdata:
-                    inventories.cache[uri] = (name, now, invdata)
-                    update = True
-                    break
+                    cache[uri] = (name, now, invdata)
+                    return True
+        return False
+    finally:
         if failures == []:
             pass
         elif len(failures) < len(invs):
@@ -227,7 +224,21 @@ def load_mappings(app: Sphinx) -> None:
             logger.warning(__("failed to reach any of the inventories "
                               "with the following issues:") + "\n" + issues)

-    if update:
+
+def load_mappings(app: Sphinx) -> None:
+    """Load all intersphinx mappings into the environment."""
+    now = int(time.time())
+    inventories = InventoryAdapter(app.builder.env)
+
+    with concurrent.futures.ThreadPoolExecutor() as pool:
+        futures = []
+        for name, (uri, invs) in app.config.intersphinx_mapping.values():
+            futures.append(pool.submit(
+                fetch_inventory_group, name, uri, invs, inventories.cache, app, now
+            ))
+        updated = [f.result() for f in concurrent.futures.as_completed(futures)]
+
+    if any(updated):
         inventories.clear()

         # Duplicate values in different inventories will shadow each
@@ -374,6 +385,7 @@ def inspect_main(argv: List[str]) -> None:
     class MockConfig:
         intersphinx_timeout = None  # type: int
         tls_verify = False
+        user_agent = None

     class MockApp:
         srcdir = ''
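The rewritten ``load_mappings()`` above submits one ``fetch_inventory_group()`` job per mapping entry to a thread pool, so a configuration with several remote inventories no longer fetches them one after another. A typical conf.py sketch of such a mapping (the projects listed are just examples):

    # conf.py -- each entry below becomes one thread pool job in load_mappings()
    extensions = ['sphinx.ext.intersphinx']

    intersphinx_mapping = {
        'python': ('https://docs.python.org/3', None),
        'requests': ('https://requests.readthedocs.io/en/latest/', None),
        'numpy': ('https://numpy.org/doc/stable/', None),
    }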
sphinx/py.typed (new file, empty)

sphinx/templates/gettext/message.pot_t (new file, 33 lines)

@@ -0,0 +1,33 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) {{ copyright }}
+# This file is distributed under the same license as the {{ project }} package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: {{ project|e }} {{ version|e }}\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: {{ ctime|e }}\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+{% for message in messages %}
+{% if display_location -%}
+{% for source, line in message.locations -%}
+#: {{ source }}:{{ line }}
+{% endfor -%}
+{% endif -%}
+
+{% if display_uuid -%}
+{% for uuid in message.uuids -%}
+#: {{ uuid }}
+{% endfor -%}
+{% endif -%}
+
+msgid "{{ message.text|e }}"
+msgstr ""
+{% endfor -%}
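A rough sketch of how the new template and renderer fit together, mirroring ``MessageCatalogBuilder.finish()`` from the hunks above (the catalog contents and context values are placeholders):

    # Hypothetical standalone rendering of the new message.pot_t template
    from sphinx.builders.gettext import Catalog, GettextRenderer, MsgOrigin

    catalog = Catalog()
    catalog.add('Hello world', MsgOrigin('index', 42))
    context = {
        'version': '1.0', 'copyright': '2019, demo', 'project': 'demo',
        'ctime': '2019-11-01 12:00+0000',
        'display_location': True, 'display_uuid': False,
        'messages': list(catalog),   # Catalog.__iter__ yields Message objects
    }
    pot = GettextRenderer().render('message.pot_t', context)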
@@ -35,6 +35,7 @@
 <%= sphinxsetup %>
 <%= fvset %>
 <%= geometry %>
+<%= extrapackages %>

 <%- for name, option in packages %>
 <%- if option %>
@@ -1054,7 +1054,7 @@
 % Take advantage of the already applied Pygments mark-up to insert
 % potential linebreaks for TeX processing.
 % {, <, #, %, $, ' and ": go to next line.
-% _, }, ^, &, >, - and ~: stay at end of broken line.
+% _, }, ^, &, >, -, ~, and \: stay at end of broken line.
 % Use of \textquotesingle for straight quote.
 % FIXME: convert this to package options ?
 \newcommand*\sphinxbreaksbeforelist {%
@@ -1066,6 +1066,7 @@
 \newcommand*\sphinxbreaksafterlist {%
     \do\PYGZus\_\do\PYGZcb\}\do\PYGZca\^\do\PYGZam\&% _, }, ^, &,
     \do\PYGZgt\>\do\PYGZhy\-\do\PYGZti\~% >, -, ~
+    \do\PYGZbs\\% \
 }
 \newcommand*\sphinxbreaksatspecials {%
   \def\do##1##2%
@@ -1110,6 +1111,9 @@
 \newcommand*\sphinxVerbatimTitle {}
 % This box to typeset the caption before framed.sty multiple passes for framing.
 \newbox\sphinxVerbatim@TitleBox
+% This box to measure contents if nested as inner \MakeFramed requires then
+% minipage encapsulation but too long contents then break outer \MakeFramed
+\newbox\sphinxVerbatim@ContentsBox
 % This is a workaround to a "feature" of French lists, when literal block
 % follows immediately; usable generally (does only \par then), a priori...
 \newcommand*\sphinxvspacefixafterfrenchlists{%
@@ -1256,17 +1260,23 @@
   \itemsep   \z@skip
   \topsep    \z@skip
   \partopsep \z@skip
-  % trivlist will set \parsep to \parskip = zero
+  % trivlist will set \parsep to \parskip (which itself is set to zero above)
   % \leftmargin will be set to zero by trivlist
   \rightmargin\z@
   \parindent  \z@% becomes \itemindent. Default zero, but perhaps overwritten.
   \trivlist\item\relax
-     \ifsphinxverbatimwithminipage\spx@inframedtrue\fi
-     % use a minipage if we are already inside a framed environment
-     \ifspx@inframed\noindent\begin{minipage}{\linewidth}\fi
+     \ifspx@inframed\setbox\sphinxVerbatim@ContentsBox\vbox\bgroup
+        \@setminipage\hsize\linewidth
+        % use bulk of minipage paragraph shape restores (this is needed
+        % in indented contexts, at least for some)
+        \textwidth\hsize \columnwidth\hsize \@totalleftmargin\z@
+        \leftskip\z@skip \rightskip\z@skip \@rightskip\z@skip
+     \else
+     \ifsphinxverbatimwithminipage\noindent\begin{minipage}{\linewidth}\fi
        \MakeFramed {% adapted over from framed.sty's snugshade environment
          \advance\hsize-\width\@totalleftmargin\z@\linewidth\hsize\@setminipage
        }%
+     \fi
   % For grid placement from \strut's in \FancyVerbFormatLine
   \lineskip\z@skip
   % active comma should not be overwritten by \@noligs
@@ -1278,8 +1288,49 @@
 }
 {%
   \endOriginalVerbatim
+  \ifspx@inframed
+   \egroup % finish \sphinxVerbatim@ContentsBox vbox
+   \nobreak % update page totals
+   \ifdim\dimexpr\ht\sphinxVerbatim@ContentsBox+
+         \dp\sphinxVerbatim@ContentsBox+
+         \ht\sphinxVerbatim@TitleBox+
+         \dp\sphinxVerbatim@TitleBox+
+         2\fboxsep+2\fboxrule+
+         % try to account for external frame parameters
+         \FrameSep+\FrameRule+
+         % Usage here of 2 baseline distances is empirical.
+         % In border case where code-block fits barely in remaining space,
+         % it gets framed and looks good but the outer frame may continue
+         % on top of next page and give (if no contents after code-block)
+         % an empty framed line, as testing showed.
+         2\baselineskip+
+         % now add all to accumulated page totals and compare to \pagegoal
+         \pagetotal+\pagedepth>\pagegoal
+    % long contents: do not \MakeFramed. Do make a caption (either before or
+    % after) if title exists. Continuation hints across pagebreaks dropped.
+    % FIXME? a bottom caption may end up isolated at top of next page
+    % (no problem with a top caption, which is default)
+    \spx@opt@verbatimwithframefalse
+    \def\sphinxVerbatim@Title{\noindent\box\sphinxVerbatim@TitleBox\par}%
+    \sphinxVerbatim@Before
+    \noindent\unvbox\sphinxVerbatim@ContentsBox\par
+    \sphinxVerbatim@After
+   \else
+    % short enough contents: use \MakeFramed. As it is nested, this requires
+    % minipage encapsulation.
+    \noindent\begin{minipage}{\linewidth}%
+     \MakeFramed {% Use it now with the fetched contents
+      \advance\hsize-\width\@totalleftmargin\z@\linewidth\hsize\@setminipage
+     }%
+     \unvbox\sphinxVerbatim@ContentsBox
+     % some of this may be superfluous:
+     \par\unskip\@minipagefalse\endMakeFramed
+    \end{minipage}%
+   \fi
+  \else % non-nested \MakeFramed
   \par\unskip\@minipagefalse\endMakeFramed % from framed.sty snugshade
-  \ifspx@inframed\end{minipage}\fi
+  \ifsphinxverbatimwithminipage\end{minipage}\fi
+  \fi
   \endtrivlist
 }
 \newenvironment {sphinxVerbatimNoFrame}
@@ -1314,6 +1365,7 @@
    {\def##1{\discretionary{\char`##2}{\sphinxafterbreak}{\char`##2}}}%
   \do\_\_\do\}\}\do\textasciicircum\^\do\&\&% _, }, ^, &,
   \do\textgreater\>\do\textasciitilde\~% >, ~
+  \do\textbackslash\\% \
 }
 \newcommand*\sphinxbreaksviaactiveinparsedliteral{%
   \sphinxbreaksviaactive % by default handles . , ; ? ! /
@@ -1736,13 +1788,25 @@
 % to obtain straight quotes we execute \@noligs as patched by upquote, and
 % \scantokens is needed in cases where it would be too late for the macro to
 % first set catcodes and then fetch its argument. We also make the contents
-% breakable at non-escaped . , ; ? ! / using \sphinxbreaksviaactive.
+% breakable at non-escaped . , ; ? ! / using \sphinxbreaksviaactive,
+% and also at \ character (which is escaped to \textbackslash{}).
+\protected\def\sphinxtextbackslashbreakbefore
+ {\discretionary{}{\sphinxafterbreak\sphinx@textbackslash}{\sphinx@textbackslash}}
+\protected\def\sphinxtextbackslashbreakafter
+ {\discretionary{\sphinx@textbackslash}{\sphinxafterbreak}{\sphinx@textbackslash}}
+\let\sphinxtextbackslash\sphinxtextbackslashbreakafter
 % the macro must be protected if it ends up used in moving arguments,
 % in 'alltt' \@noligs is done already, and the \scantokens must be avoided.
 \protected\def\sphinxupquote#1{{\def\@tempa{alltt}%
   \ifx\@tempa\@currenvir\else
   \ifspx@opt@inlineliteralwraps
-  \sphinxbreaksviaactive\let\sphinxafterbreak\empty
+  % break at . , ; ? ! /
+  \sphinxbreaksviaactive
+  % break also at \
+  \let\sphinx@textbackslash\textbackslash
+  \let\textbackslash\sphinxtextbackslash
+  % do not typeset a continuation symbol on next line
+  \let\sphinxafterbreak\sphinxafterbreakofinlineliteral
   % do not overwrite the comma set-up
   \let\verbatim@nolig@list\sphinx@literal@nolig@list
   \fi
@@ -1754,6 +1818,7 @@
 \def\sphinx@do@noligs #1{\catcode`#1\active\begingroup\lccode`\~`#1\relax
   \lowercase{\endgroup\def~{\leavevmode\kern\z@\char`#1 }}}
 \def\sphinx@literal@nolig@list {\do\`\do\<\do\>\do\'\do\-}%
+\let\sphinxafterbreakofinlineliteral\empty

 % Some custom font markup commands.
 \protected\def\sphinxstrong#1{\textbf{#1}}
@@ -11,7 +11,7 @@
 \ProcessLocalKeyvalOptions* % ignore class options

 \ifspx@cyropt@Xtwo
-% original code by tex.sx user egreg:
+% original code by tex.sx user egreg (updated 2019/10/28):
 % https://tex.stackexchange.com/a/460325/
 % 159 Cyrillic glyphs as available in X2 TeX 8bit font encoding
 % This assumes inputenc loaded with utf8 option, or LaTeX release
@@ -27,7 +27,9 @@
   {Ӎ}{ӎ}{Ӕ}{ӕ}{Ә}{ә}{Ӡ}{ӡ}{Ө}{ө}\do
  {%
   \begingroup\def\IeC{\protect\DeclareTextSymbolDefault}%
-  \protected@edef\@temp{\endgroup\next{X2}}\@temp
+  \protected@edef\@temp{\endgroup
+    \@ifl@t@r{\fmtversion}{2019/10/01}{\csname u8:\next\endcsname}{\next}}%
+  \@temp{X2}%
  }%
 \else
 \ifspx@cyropt@TtwoA
@@ -3,7 +3,8 @@ var DOCUMENTATION_OPTIONS = {
     VERSION: '{{ release|e }}',
     LANGUAGE: '{{ language }}',
     COLLAPSE_INDEX: false,
-    FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}',
+    BUILDER: '{{ builder }}',
+    FILE_SUFFIX: '{{ file_suffix }}',
    HAS_SOURCE: {{ has_source|lower }},
     SOURCELINK_SUFFIX: '{{ sourcelink_suffix }}',
     NAVIGATION_WITH_KEYS: {{ 'true' if theme_navigation_with_keys|tobool else 'false'}}
@@ -245,7 +245,7 @@ var Search = {
       if (results.length) {
         var item = results.pop();
         var listItem = $('<li style="display:none"></li>');
-        if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') {
+        if (DOCUMENTATION_OPTIONS.BUILDER === 'dirhtml') {
           // dirhtml builder
           var dirname = item[0] + '/';
           if (dirname.match(/\/index\/$/)) {
@@ -53,6 +53,11 @@ def publish_msgstr(app: "Sphinx", source: str, source_path: str, source_line: in
     :return: document
     :rtype: docutils.nodes.document
     """
+    try:
+        # clear rst_prolog temporarily
+        rst_prolog = config.rst_prolog
+        config.rst_prolog = None  # type: ignore
+
         from sphinx.io import SphinxI18nReader
         reader = SphinxI18nReader()
         reader.setup(app)
@@ -68,6 +73,8 @@ def publish_msgstr(app: "Sphinx", source: str, source_path: str, source_line: in
         except IndexError:  # empty node
             pass
         return doc
+    finally:
+        config.rst_prolog = rst_prolog  # type: ignore


 class PreserveTranslatableMessages(SphinxTransform):
@@ -187,9 +187,8 @@ class sphinx_domains:
     def __enter__(self) -> None:
         self.enable()

-    def __exit__(self, exc_type: "Type[Exception]", exc_value: Exception, traceback: Any) -> bool:  # type: ignore # NOQA
+    def __exit__(self, exc_type: "Type[Exception]", exc_value: Exception, traceback: Any) -> None:  # NOQA
         self.disable()
-        return False

     def enable(self) -> None:
         self.directive_func = directives.directive
@@ -8,6 +8,7 @@
 :license: BSD, see LICENSE for details.
 """

+import sys
 import warnings
 from contextlib import contextmanager
 from typing import Generator, Union
@@ -16,6 +17,7 @@ from urllib.parse import urlsplit
 import pkg_resources
 import requests

+import sphinx
 from sphinx.config import Config

 try:
@@ -105,14 +107,28 @@ def _get_tls_cacert(url: str, config: Config) -> Union[str, bool]:
     return certs.get(hostname, True)


+def _get_user_agent(config: Config) -> str:
+    if config.user_agent:
+        return config.user_agent
+    else:
+        return ' '.join([
+            'Sphinx/%s' % sphinx.__version__,
+            'requests/%s' % requests.__version__,
+            'python/%s' % '.'.join(map(str, sys.version_info[:3])),
+        ])
+
+
 def get(url: str, **kwargs) -> requests.Response:
     """Sends a GET request like requests.get().

     This sets up User-Agent header and TLS verification automatically."""
-    kwargs.setdefault('headers', dict(useragent_header))
+    headers = kwargs.setdefault('headers', {})
     config = kwargs.pop('config', None)
     if config:
         kwargs.setdefault('verify', _get_tls_cacert(url, config))
+        headers.setdefault('User-Agent', _get_user_agent(config))
+    else:
+        headers.setdefault('User-Agent', useragent_header[0][1])

     with ignore_insecure_warning(**kwargs):
         return requests.get(url, **kwargs)
@@ -122,10 +138,13 @@ def head(url: str, **kwargs) -> requests.Response:
     """Sends a HEAD request like requests.head().

     This sets up User-Agent header and TLS verification automatically."""
-    kwargs.setdefault('headers', dict(useragent_header))
+    headers = kwargs.setdefault('headers', {})
     config = kwargs.pop('config', None)
     if config:
         kwargs.setdefault('verify', _get_tls_cacert(url, config))
+        headers.setdefault('User-Agent', _get_user_agent(config))
+    else:
+        headers.setdefault('User-Agent', useragent_header[0][1])

     with ignore_insecure_warning(**kwargs):
         return requests.get(url, **kwargs)
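A short usage sketch of the updated helpers: when a caller passes the project ``Config``, both the TLS options and the (possibly overridden) User-Agent are applied, otherwise the old static header is kept. The ``app`` object below stands for the running Sphinx application inside a builder or extension:

    # Hypothetical caller, e.g. inside a builder where `app` is available
    from sphinx.util import requests as sphinx_requests

    response = sphinx_requests.get('https://example.org/objects.inv', config=app.config)
    response.raise_for_status()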
@@ -155,6 +155,7 @@ DEFAULT_SETTINGS = {
      '% Set up styles of URL: it should be placed after hyperref.\n'
      '\\urlstyle{same}'),
     'contentsname': '',
+    'extrapackages': '',
     'preamble': '',
     'title': '',
     'release': '',
@@ -5,4 +5,9 @@ test-epub-anchor-id

 blah blah blah

+.. setting:: STATICFILES_SECTION
+
+blah blah blah
+==============
+
 see :setting:`STATICFILES_FINDERS`
@@ -105,6 +105,8 @@ def test_add_is_parallel_allowed(app, status, warning):

     app.setup_extension('read_serial')
     assert app.is_parallel_allowed('read') is False
+    assert "the read_serial extension is not safe for parallel reading" in warning.getvalue()
+    warning.truncate(0)  # reset warnings
     assert app.is_parallel_allowed('write') is True
     assert warning.getvalue() == ''
     app.extensions.pop('read_serial')
@@ -321,6 +321,7 @@ def test_epub_anchor_id(app):

     html = (app.outdir / 'index.xhtml').text()
     assert '<p id="std-setting-STATICFILES_FINDERS">blah blah blah</p>' in html
+    assert '<span id="std-setting-STATICFILES_SECTION"></span><h1>blah blah blah</h1>' in html
     assert 'see <a class="reference internal" href="#std-setting-STATICFILES_FINDERS">' in html
@@ -1461,3 +1461,11 @@ def test_texescape_for_unicode_supported_engine(app, status, warning):
     assert 'double struck italic small i: i' in result
     assert 'superscript: ⁰, ¹' in result
     assert 'subscript: ₀, ₁' in result
+
+
+@pytest.mark.sphinx('latex', testroot='basic',
+                    confoverrides={'latex_elements': {'extrapackages': r'\usepackage{foo}'}})
+def test_latex_elements_extrapackages(app, status, warning):
+    app.builder.build_all()
+    result = (app.outdir / 'test.tex').text()
+    assert r'\usepackage{foo}' in result
@@ -206,6 +206,14 @@ def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path):
     content, lines = reader.read()


+def test_LiteralIncludeReader_end_before(literal_inc_path):
+    options = {'end-before': 'nclud'}  # *nclud* matches first and third lines.
+    reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
+    content, lines = reader.read()
+    assert content == ("# Literally included file using Python highlighting\n"
+                       "\n")
+
+
 @pytest.mark.xfail(os.name != 'posix', reason="Not working on windows")
 def test_LiteralIncludeReader_prepend(literal_inc_path):
     options = {'lines': '1', 'prepend': 'Hello', 'append': 'Sphinx'}