Merge branch 'stable'
commit 2578f47664
@@ -9,6 +9,7 @@ python:
  - "3.3"
  - "3.4"
  - "3.5"
  - "3.5-dev"
  - "pypy"
env:
  global:
CHANGES (18 changed lines)

@@ -48,7 +48,7 @@ Features added
  code lines themselves) obey the indentation in lists or quoted blocks.

* #2343: the long source lines in code-blocks are wrapped (without modifying
  the line numbering) in LaTeX output (ref #1534, #2304).
  the line numbering) in LaTeX output (ref #1534, #2304).

Bugs fixed
----------
@@ -77,6 +77,22 @@ Bugs fixed
* #2492: Figure directive with :figwidth: generates incorrect LaTeX code
* The caption of a figure is always centered even if ``:align:`` was specified
* #2526: LaTeX writer crashes if a section has only images
* #2522: Sphinx touches mo files under the installed directory, which caused a permission error.
* #2536: C++, fix crash when an immediately nested scope has the same name as the current scope.
* #2555: Fix crash on any-references with unicode.
* #2517: wrong bookmark encoding in PDF if using LuaLaTeX
* #2521: generated Makefile causes BSD make to crash if sphinx-build is not found
* #2470: ``typing`` backport package causes autodoc errors with Python 2.7
* ``sphinx.ext.intersphinx`` crashes if a non-string value is used as a key of `intersphinx_mapping`
* #2518: `intersphinx_mapping` disallows non-alphanumeric keys
* #2558: unpack error on devhelp builder
* #2561: Info builder crashes when a footnote contains a link
* #2565: The descriptions of objects generated by ``sphinx.ext.autosummary`` overflow lines in the LaTeX writer
* Extend pdflatex config in sphinx.sty to subparagraphs (ref: #2551)
* #2445: `rst_prolog` and `rst_epilog` affect non-reST sources
* #2576: ``sphinx.ext.imgmath`` crashes if the subprocess raises an error
* #2577: ``sphinx.ext.imgmath``: Invalid arguments are passed to dvisvgm
* #2556: Xapian search does not work with Python 3


Release 1.4.1 (released Apr 12, 2016)
@@ -196,6 +196,8 @@ Use ```Link text <http://example.com/>`_`` for inline web links. If the link
text should be the web address, you don't need special markup at all, the parser
finds links and mail addresses in ordinary text.

.. important:: There must be a space between the link text and the opening \< for the URL.

You can also separate the link and the target definition (:duref:`ref
<hyperlink-targets>`), like this::

setup.py (17 changed lines)

@@ -98,6 +98,13 @@ else:
        def run(self):
            compile_catalog.run(self)

            if isinstance(self.domain, list):
                for domain in self.domain:
                    self._run_domain_js(domain)
            else:
                self._run_domain_js(self.domain)

        def _run_domain_js(self, domain):
            po_files = []
            js_files = []

@@ -106,20 +113,20 @@ else:
                    po_files.append((self.locale,
                                     os.path.join(self.directory, self.locale,
                                                  'LC_MESSAGES',
                                                  self.domain + '.po')))
                                                  domain + '.po')))
                    js_files.append(os.path.join(self.directory, self.locale,
                                                 'LC_MESSAGES',
                                                 self.domain + '.js'))
                                                 domain + '.js'))
                else:
                    for locale in os.listdir(self.directory):
                        po_file = os.path.join(self.directory, locale,
                                               'LC_MESSAGES',
                                               self.domain + '.po')
                                               domain + '.po')
                        if os.path.exists(po_file):
                            po_files.append((locale, po_file))
                            js_files.append(os.path.join(self.directory, locale,
                                                         'LC_MESSAGES',
                                                         self.domain + '.js'))
                                                         domain + '.js'))
            else:
                po_files.append((self.locale, self.input_file))
                if self.output_file:
@@ -127,7 +134,7 @@ else:
                else:
                    js_files.append(os.path.join(self.directory, self.locale,
                                                 'LC_MESSAGES',
                                                 self.domain + '.js'))
                                                 domain + '.js'))

            for js_file, (locale, po_file) in zip(js_files, po_files):
                infile = open(po_file, 'r')
@@ -116,10 +116,14 @@ class index(nodes.Invisible, nodes.Inline, nodes.TextElement):
    """Node for index entries.

    This node is created by the ``index`` directive and has one attribute,
    ``entries``. Its value is a list of 4-tuples of ``(entrytype, entryname,
    target, ignored)``.
    ``entries``. Its value is a list of 5-tuples of ``(entrytype, entryname,
    target, ignored, key)``.

    *entrytype* is one of "single", "pair", "double", "triple".

    *key* is the categorization character (usually a single character) used on
    the general index page. For details, see :rst:directive:`glossary` and
    issue #2320.
    """

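As a rough illustration of the 5-tuple layout the docstring above describes (the entry names, target ids, and keys below are invented for this sketch, not taken from the commit)::

    # Hypothetical ``entries`` value for an index node, following the
    # (entrytype, entryname, target, ignored, key) layout described above.
    # ``key`` is the categorization key for the general index; None when unset.
    entries = [
        ('single', 'installation', 'index-0', '', None),
        ('pair', 'builder; html', 'index-1', '', 'B'),
    ]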
@@ -44,6 +44,7 @@ from sphinx.util.osutil import ENOENT
from sphinx.util.logging import is_suppressed_warning
from sphinx.util.console import bold, lightgray, darkgray, darkgreen, \
    term_width_line
from sphinx.util.i18n import find_catalog_source_files

if hasattr(sys, 'intern'):
    intern = sys.intern
@@ -207,13 +208,17 @@ class Sphinx(object):
        if self.config.language is not None:
            self.info(bold('loading translations [%s]... ' %
                           self.config.language), nonl=True)
            locale_dirs = [None, path.join(package_dir, 'locale')] + \
                [path.join(self.srcdir, x) for x in self.config.locale_dirs]
            user_locale_dirs = [
                path.join(self.srcdir, x) for x in self.config.locale_dirs]
            # compile mo files if sphinx.po file in user locale directories are updated
            for catinfo in find_catalog_source_files(
                    user_locale_dirs, self.config.language, domains=['sphinx'],
                    charset=self.config.source_encoding):
                catinfo.write_mo(self.config.language)
            locale_dirs = [None, path.join(package_dir, 'locale')] + user_locale_dirs
        else:
            locale_dirs = []
        self.translator, has_translation = locale.init(locale_dirs,
                                                       self.config.language,
                                                       charset=self.config.source_encoding)
        self.translator, has_translation = locale.init(locale_dirs, self.config.language)
        if self.config.language is not None:
            if has_translation or self.config.language == 'en':
                # "en" never needs to be translated
@@ -123,7 +123,7 @@ class DevhelpBuilder(StandaloneHTMLBuilder):
                                subitem[1], [])

        for (key, group) in index:
            for title, (refs, subitems) in group:
            for title, (refs, subitems, key) in group:
                write_index(title, refs, subitems)

        # Dump the XML file
@@ -2587,7 +2587,7 @@ class Symbol(object):
            s = s._find_named_symbol(identifier, templateParams,
                                     templateArgs, operator,
                                     templateShorthand=False,
                                     matchSelf=True)
                                     matchSelf=False)
            if not s:
                return None
        return s
@@ -4100,7 +4100,7 @@ class CPPDomain(Domain):
            parser.assert_end()
        except DefinitionError as e:
            warner.warn('Unparseable C++ cross-reference: %r\n%s'
                        % (target, str(e.description)))
                        % (target, text_type(e.description)))
            return None, None
        parentKey = node.get("cpp:parent_key", None)
        rootSymbol = self.data['root_symbol']
@@ -35,7 +35,10 @@ from sphinx.util.inspect import getargspec, isdescriptor, safe_getmembers, \
from sphinx.util.docstrings import prepare_docstring

try:
    import typing
    if sys.version_info >= (3,):
        import typing
    else:
        typing = None
except ImportError:
    typing = None

@@ -269,9 +272,17 @@ def format_annotation(annotation):
    if isinstance(annotation, typing.TypeVar):
        return annotation.__name__
    elif hasattr(typing, 'GenericMeta') and \
            isinstance(annotation, typing.GenericMeta) and \
            hasattr(annotation, '__parameters__'):
        params = annotation.__parameters__
            isinstance(annotation, typing.GenericMeta):
        # In Python 3.5.2+, all arguments are stored in __args__,
        # whereas __parameters__ only contains generic parameters.
        #
        # Prior to Python 3.5.2, __args__ is not available, and all
        # arguments are in __parameters__.
        params = None
        if hasattr(annotation, '__args__'):
            params = annotation.__args__
        elif hasattr(annotation, '__parameters__'):
            params = annotation.__parameters__
        if params is not None:
            param_str = ', '.join(format_annotation(p) for p in params)
            return '%s[%s]' % (qualified_name, param_str)
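The comments in the new branch summarize why the lookup order matters; a small sketch of the ``__args__`` / ``__parameters__`` difference, assuming Python 3.5.2 or later (exact reprs vary by interpreter version)::

    from typing import List, TypeVar

    T = TypeVar('T')

    # On Python 3.5.2+ the concrete arguments of a generic alias live in
    # __args__, while __parameters__ only lists unbound type variables.
    # Before 3.5.2 there was no __args__ and everything sat in __parameters__.
    print(getattr(List[int], '__args__', None))        # (<class 'int'>,)
    print(getattr(List[int], '__parameters__', None))  # ()
    print(getattr(List[T], '__parameters__', None))    # (~T,)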
@@ -337,7 +337,7 @@ class Autosummary(Directive):
        *items* is a list produced by :meth:`get_items`.
        """
        table_spec = addnodes.tabular_col_spec()
        table_spec['spec'] = 'll'
        table_spec['spec'] = 'p{0.5\linewidth}p{0.5\linewidth}'

        table = autosummary_table('')
        real_table = nodes.table('', classes=['longtable'])
@@ -176,14 +176,14 @@ def render_math(self, math):
            raise
        self.builder.warn('%s command %r cannot be run (needed for math '
                          'display), check the imgmath_%s setting' %
                          image_translator, image_translator_executable,
                          image_translator)
                          (image_translator, image_translator_executable,
                           image_translator))
        self.builder._imgmath_warned_image_translator = True
        return None, None

    stdout, stderr = p.communicate()
    if p.returncode != 0:
        raise MathExtError('%s exited with error',
        raise MathExtError('%s exited with error' %
                           image_translator, stderr, stdout)
    depth = None
    if use_preview and image_format == 'png':  # depth is only useful for png
@@ -33,7 +33,7 @@ import posixpath
from os import path
import re

from six import iteritems
from six import iteritems, string_types
from six.moves.urllib import parse, request
from docutils import nodes
from docutils.utils import relative_path
@@ -271,8 +271,9 @@ def load_mappings(app):
        if isinstance(value, tuple):
            # new format
            name, (uri, inv) = key, value
            if not name.isalnum():
                app.warn('intersphinx identifier %r is not alphanumeric' % name)
            if not isinstance(name, string_types):
                app.warn('intersphinx identifier %r is not string. Ignored' % name)
                continue
        else:
            # old format, no name
            name, uri, inv = None, key, value
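For reference, the two ``intersphinx_mapping`` layouts that the branch above distinguishes look roughly like this in ``conf.py`` (the project name and URLs are illustrative only, not from this commit)::

    # conf.py -- illustrative values only
    intersphinx_mapping = {
        # new format: identifier -> (base URL, inventory file or None);
        # the identifier must be a string, otherwise it is skipped with a warning
        'python': ('https://docs.python.org/3/', None),
        # old format: no identifier; base URL -> inventory file or None
        'https://docs.python.org/2/': None,
    }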
sphinx/io.py (20 changed lines)

@@ -112,14 +112,26 @@ class SphinxFileInput(FileInput):
        return data.decode(self.encoding, 'sphinx')  # py2: decoding

    def read(self):
        def get_parser_type(docname):
            path = self.env.doc2path(docname)
            for suffix in self.env.config.source_parsers:
                if path.endswith(suffix):
                    parser_class = self.env.config.source_parsers[suffix]
                    if isinstance(parser_class, string_types):
                        parser_class = import_object(parser_class, 'source parser')
                    return parser_class.supported
            else:
                return ('restructuredtext',)

        data = FileInput.read(self)
        if self.app:
            arg = [data]
            self.app.emit('source-read', self.env.docname, arg)
            data = arg[0]
        docinfo, data = split_docinfo(data)
        if self.env.config.rst_epilog:
            data = data + '\n' + self.env.config.rst_epilog + '\n'
        if self.env.config.rst_prolog:
            data = self.env.config.rst_prolog + '\n' + data
        if 'restructuredtext' in get_parser_type(self.env.docname):
            if self.env.config.rst_epilog:
                data = data + '\n' + self.env.config.rst_epilog + '\n'
            if self.env.config.rst_prolog:
                data = self.env.config.rst_prolog + '\n' + data
        return docinfo + data
@@ -195,7 +195,7 @@ else:
        return translators['sphinx'].ugettext(message)


def init(locale_dirs, language, catalog='sphinx', charset='utf-8'):
def init(locale_dirs, language, catalog='sphinx'):
    """Look for message catalogs in `locale_dirs` and *ensure* that there is at
    least a NullTranslations catalog set in `translators`. If called multiple
    times or if several ``.mo`` files are found, their contents are merged
@@ -209,13 +209,6 @@ def init(locale_dirs, language, catalog='sphinx', charset='utf-8'):
    # the None entry is the system's default locale path
    has_translation = True

    # compile mo files if po file is updated
    # TODO: remove circular importing
    from sphinx.util.i18n import find_catalog_source_files
    for catinfo in find_catalog_source_files(locale_dirs, language, domains=[catalog],
                                             charset=charset):
        catinfo.write_mo(language)

    # loading
    for dir_ in locale_dirs:
        try:
@@ -534,16 +534,6 @@ SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = %(rbuilddir)s

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
\t$(error \
The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx \
installed, then set the SPHINXBUILD environment variable to point \
to the full path of the '$(SPHINXBUILD)' executable. Alternatively you \
can add the directory with the executable to your PATH. \
If you don\\'t have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
@@ -1077,16 +1067,6 @@ SPHINXPROJ = %(project_fn)s
SOURCEDIR = %(rsrcdir)s
BUILDDIR = %(rbuilddir)s

# User-friendly check for sphinx-build.
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error \
The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx \
installed, then set the SPHINXBUILD environment variable to point \
to the full path of the '$(SPHINXBUILD)' executable. Alternatively you \
can add the directory with the executable to your PATH. \
If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Has to be explicit, otherwise we don't get "make" without targets right.
help:
\t@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@@ -452,8 +452,12 @@
  {\py@TitleColor\thesubsection}{0.5em}{\py@TitleColor}{\py@NormalColor}
\titleformat{\subsubsection}{\py@HeaderFamily}%
  {\py@TitleColor\thesubsubsection}{0.5em}{\py@TitleColor}{\py@NormalColor}
\titleformat{\paragraph}{\small\py@HeaderFamily}%
  {\py@TitleColor}{0em}{\py@TitleColor}{\py@NormalColor}
% By default paragraphs (and subsubsections) will not be numbered because
% sphinxmanual.cls and sphinxhowto.cls set secnumdepth to 2
\titleformat{\paragraph}{\py@HeaderFamily}%
  {\py@TitleColor\theparagraph}{0.5em}{\py@TitleColor}{\py@NormalColor}
\titleformat{\subparagraph}{\py@HeaderFamily}%
  {\py@TitleColor\thesubparagraph}{0.5em}{\py@TitleColor}{\py@NormalColor}

% {fulllineitems} is the main environment for object descriptions.
%
@@ -697,7 +701,10 @@

% to make pdf with correct encoded bookmarks in Japanese
% this should precede the hyperref package
\ifx\kanjiskip\undefined\else
\ifx\kanjiskip\undefined
  % for non-Japanese: make sure bookmarks are ok also with lualatex
  \PassOptionsToPackage{pdfencoding=unicode}{hyperref}
\else
  \usepackage{atbegshi}
  \ifx\ucs\undefined
    \ifnum 42146=\euc"A4A2
@@ -239,9 +239,7 @@ class Locale(Transform):
        # fetch translations
        dirs = [path.join(env.srcdir, directory)
                for directory in env.config.locale_dirs]
        catalog, has_catalog = init_locale(dirs, env.config.language,
                                           textdomain,
                                           charset=env.config.source_encoding)
        catalog, has_catalog = init_locale(dirs, env.config.language, textdomain)
        if not has_catalog:
            return

@@ -11,6 +11,8 @@

import xapian

from six import string_types

from sphinx.util.osutil import ensuredir
from sphinx.websupport.search import BaseSearch

@@ -73,7 +75,10 @@ class XapianSearch(BaseSearch):
        results = []

        for m in matches:
            context = self.extract_context(m.document.get_data())
            data = m.document.get_data()
            if not isinstance(data, string_types):
                data = data.decode("utf-8")
            context = self.extract_context(data)
            results.append((m.document.get_value(self.DOC_PATH),
                            m.document.get_value(self.DOC_TITLE),
                            ''.join(context)))
@@ -650,7 +650,7 @@ class TexinfoTranslator(nodes.NodeVisitor):
                    self.next_section_ids.add(node['refid'])
                self.next_section_ids.update(node['ids'])
                return
            except IndexError:
            except (IndexError, AttributeError):
                pass
        if 'refuri' in node:
            return
tests/roots/test-prolog/conf.py (new file, 12 lines)

@@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-

import os
import sys
sys.path.insert(0, os.path.abspath('.'))


master_doc = 'index'
extensions = ['prolog_markdown_parser']

rst_prolog = '*Hello world*.\n\n'
rst_epilog = '\n\n*Good-bye world*.'
tests/roots/test-prolog/index.rst (new file, 7 lines)

@@ -0,0 +1,7 @@
prolog and epilog
=================

.. toctree::

   restructuredtext
   markdown
tests/roots/test-prolog/markdown.md (new file, 3 lines)

@@ -0,0 +1,3 @@
# sample document

This is a sample document in markdown
tests/roots/test-prolog/prolog_markdown_parser.py (new file, 12 lines)

@@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-

from docutils.parsers import Parser


class DummyMarkdownParser(Parser):
    def parse(self, inputstring, document):
        document.rawsource = inputstring


def setup(app):
    app.add_source_parser('.md', DummyMarkdownParser)
tests/roots/test-prolog/restructuredtext.rst (new file, 4 lines)

@@ -0,0 +1,4 @@
sample document
===============

This is a sample document in reST
@@ -161,7 +161,7 @@ def test_missing_reference(tempdir, app, status, warning):
def test_load_mappings_warnings(tempdir, app, status, warning):
    """
    load_mappings issues a warning if new-style mapping
    identifiers are not alphanumeric
    identifiers are not strings
    """
    inv_file = tempdir / 'inventory'
    inv_file.write_bytes(inventory_v2)
@@ -170,13 +170,14 @@ def test_load_mappings_warnings(tempdir, app, status, warning):
        'py3k': ('https://docs.python.org/py3k/', inv_file),
        'repoze.workflow': ('http://docs.repoze.org/workflow/', inv_file),
        'django-taggit': ('http://django-taggit.readthedocs.org/en/latest/',
                          inv_file)
                          inv_file),
        12345: ('http://www.sphinx-doc.org/en/stable/', inv_file),
    }

    app.config.intersphinx_cache_limit = 0
    # load the inventory and check if it's done correctly
    load_mappings(app)
    assert warning.getvalue().count('\n') == 2
    assert warning.getvalue().count('\n') == 1


class TestStripBasicAuth(unittest.TestCase):
@@ -10,6 +10,7 @@
"""

import re
import pickle

from docutils import frontend, utils, nodes
from docutils.parsers import rst
@@ -18,7 +19,7 @@ from sphinx.util import texescape
from sphinx.writers.html import HTMLWriter, SmartyPantsHTMLTranslator
from sphinx.writers.latex import LaTeXWriter, LaTeXTranslator

from util import TestApp
from util import TestApp, with_app, assert_node


app = settings = parser = None
@@ -142,3 +143,27 @@ def test_latex_escaping():
    # in URIs
    yield (verify_re, u'`test <http://example.com/~me/>`_', None,
           r'\\href{http://example.com/~me/}{test}.*')


@with_app(buildername='dummy', testroot='prolog')
def test_rst_prolog(app, status, warning):
    app.builder.build_all()
    rst = pickle.loads((app.doctreedir / 'restructuredtext.doctree').bytes())
    md = pickle.loads((app.doctreedir / 'markdown.doctree').bytes())

    # rst_prolog
    assert_node(rst[0], nodes.paragraph)
    assert_node(rst[0][0], nodes.emphasis)
    assert_node(rst[0][0][0], nodes.Text)
    assert rst[0][0][0] == 'Hello world'

    # rst_epilog
    assert_node(rst[-1], nodes.section)
    assert_node(rst[-1][-1], nodes.paragraph)
    assert_node(rst[-1][-1][0], nodes.emphasis)
    assert_node(rst[-1][-1][0][0], nodes.Text)
    assert rst[-1][-1][0][0] == 'Good-bye world'

    # rst_prolog & rst_epilog are not applied for the non-reST (markdown) parser
    assert not md.rawsource.startswith('*Hello world*.')
    assert not md.rawsource.endswith('*Good-bye world*.\n')
@@ -109,10 +109,10 @@ try:
except ImportError:
    def assert_in(x, thing, msg=''):
        if x not in thing:
            assert False, msg or '%r is not in %r%r' % (x, thing)
            assert False, msg or '%r is not in %r' % (x, thing)
    def assert_not_in(x, thing, msg=''):
        if x in thing:
            assert False, msg or '%r is in %r%r' % (x, thing)
            assert False, msg or '%r is in %r' % (x, thing)


def skip_if(condition, msg=None):