Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Merge branch '1.8' into 5127_quickstart_should_not_overwrite_Makefiles

Commit d9694befae. Changed file: CHANGES (26 lines).
@@ -9,6 +9,7 @@ Incompatible changes

* #5282: html theme: refer ``pygments_style`` settings of HTML themes
  preferentially
* The URL of download files are changed
* #5127: quickstart: ``Makefile`` and ``make.bat`` are not overwritten if exists

Deprecated

@@ -20,6 +21,9 @@ Deprecated

Features added
--------------

* #5388: Ensure frozen object descriptions are reproducible
* #5362: apidoc: Add ``--tocfile`` option to change the filename of ToC

Bugs fixed
----------

@@ -31,6 +35,11 @@ Bugs fixed

* #5348: download reference to remote file is not displayed
* #5282: html theme: ``pygments_style`` of theme was overrided by ``conf.py``
  by default
* #4379: toctree shows confusible warning when document is excluded
* #2401: autodoc: ``:members:`` causes ``:special-members:`` not to be shown
* autodoc: ImportError is replaced by AttributeError for deeper module
* #2720, #4034: Incorrect links with ``:download:``, duplicate names, and
  parallel builds

Testing
--------

@@ -284,8 +293,8 @@ Documentation

* #5083: Fix wrong make.bat option for internationalization.
* #5115: napoleon: add admonitions added by #4613 to the docs.

Release 1.7.9 (in development)
==============================
Release 1.7.10 (in development)
===============================

Dependencies
------------

@@ -305,6 +314,19 @@ Bugs fixed

Testing
--------

Release 1.7.9 (released Sep 05, 2018)
=====================================

Features added
--------------

* #5359: Make generated texinfo files reproducible by sorting the anchors

Bugs fixed
----------

* #5361: crashed on incremental build if document uses include directive

Release 1.7.8 (released Aug 29, 2018)
=====================================
@@ -58,6 +58,10 @@ Options

   Maximum depth for the generated table of contents file.

.. option:: --tocfile

   Filename for a table of contents file. Defaults to ``modules``.

.. option:: -T, --no-toc

   Do not create a table of contents file. Ignored when :option:`--full` is
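Illustrative note (not part of the diff): the new option can also be exercised through apidoc's main() entry point shown further down in this commit; the package name and output directory below are placeholders.

    # Hypothetical invocation of sphinx-apidoc's entry point with the new
    # --tocfile option; "mypackage" and "docs/api" are placeholders.
    from sphinx.ext.apidoc import main

    # Generate .rst stubs for "mypackage" into docs/api and name the
    # table-of-contents file api.rst instead of the default modules.rst.
    main(['--tocfile', 'api', '-o', 'docs/api', 'mypackage'])

    # Passing -T/--no-toc instead would suppress the ToC file entirely:
    # main(['-T', '-o', 'docs/api', 'mypackage'])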
@@ -208,6 +208,8 @@ The following variables available in the templates:

   List containing names of all inherited members of class. Only available for
   classes.

   .. versionadded:: 1.8.0

.. data:: functions

   List containing names of "public" functions in the module. Here, "public"
@@ -864,10 +864,10 @@ class StandaloneHTMLBuilder(Builder):
        for src in status_iterator(self.env.dlfiles, __('copying downloadable files... '),
                                   "brown", len(self.env.dlfiles), self.app.verbosity,
                                   stringify_func=to_relpath):
            dest = self.env.dlfiles[src][1]
            try:
                copyfile(path.join(self.srcdir, src),
                         path.join(self.outdir, '_downloads', dest))
                dest = path.join(self.outdir, '_downloads', self.env.dlfiles[src][1])
                ensuredir(path.dirname(dest))
                copyfile(path.join(self.srcdir, src), dest)
            except EnvironmentError as err:
                logger.warning(__('cannot copy downloadable file %r: %s'),
                               path.join(self.srcdir, src), err)
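Illustrative note (not part of the diff): ensuredir() is now needed because the destination recorded in env.dlfiles contains a digest subdirectory. A minimal standalone sketch of the copy step, with invented paths and a hard-coded digest:

    # Standalone sketch (not the builder itself): copy download files into
    # per-digest subdirectories under _downloads/.  All paths are invented.
    import os
    import shutil

    srcdir = 'source'
    outdir = 'build/html'
    os.makedirs(srcdir, exist_ok=True)
    open(os.path.join(srcdir, 'img.png'), 'wb').close()   # stand-in source file

    # source path -> (referencing docnames, '<md5 digest>/<basename>')
    dlfiles = {'img.png': ({'index'}, 'd41d8cd98f00b204e9800998ecf8427e/img.png')}

    for src, (_docnames, dest) in dlfiles.items():
        target = os.path.join(outdir, '_downloads', dest)
        os.makedirs(os.path.dirname(target), exist_ok=True)  # what ensuredir() provides
        shutil.copyfile(os.path.join(srcdir, src), target)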
@@ -22,7 +22,7 @@ from sphinx.domains.changeset import VersionChange  # NOQA  # for compatibility
from sphinx.locale import _
from sphinx.util import url_re, docname_join
from sphinx.util.docutils import SphinxDirective
from sphinx.util.matching import patfilter
from sphinx.util.matching import Matcher, patfilter
from sphinx.util.nodes import explicit_title_re, set_source_info, \
    process_index_entry

@@ -96,6 +96,7 @@ class TocTree(SphinxDirective):
        all_docnames.remove(self.env.docname)  # remove current document

        ret = []
        excluded = Matcher(self.config.exclude_patterns)
        for entry in self.content:
            if not entry:
                continue
@@ -131,9 +132,13 @@ class TocTree(SphinxDirective):
            if url_re.match(ref) or ref == 'self':
                toctree['entries'].append((title, ref))
            elif docname not in self.env.found_docs:
                ret.append(self.state.document.reporter.warning(
                    'toctree contains reference to nonexisting '
                    'document %r' % docname, line=self.lineno))
                if excluded(self.env.doc2path(docname, None)):
                    message = 'toctree contains reference to excluded document %r'
                else:
                    message = 'toctree contains reference to nonexisting document %r'

                ret.append(self.state.document.reporter.warning(message % docname,
                                                                line=self.lineno))
                self.env.note_reread()
            else:
                all_docnames.discard(docname)
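Illustrative note (not part of the diff): the new branch only chooses between two warning texts, which is the fix for #4379. A standalone sketch of that decision, emulating sphinx.util.matching.Matcher with fnmatch and using invented patterns and docnames:

    # Sketch of the new warning selection in the toctree directive.
    from fnmatch import fnmatch

    exclude_patterns = ['drafts/*']
    found_docs = {'index', 'usage'}          # docs Sphinx actually collected

    def warn_for(docname):
        path = docname + '.rst'              # stand-in for env.doc2path(docname, None)
        if any(fnmatch(path, pat) for pat in exclude_patterns):
            return 'toctree contains reference to excluded document %r' % docname
        return 'toctree contains reference to nonexisting document %r' % docname

    print(warn_for('drafts/todo'))   # matched exclude_patterns -> "excluded"
    print(warn_for('missing'))       # never existed -> "nonexisting"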
@@ -19,7 +19,7 @@ from os import path

from docutils.utils import get_source_line
from six import BytesIO, next
from six.moves import cPickle as pickle, reduce
from six.moves import cPickle as pickle

from sphinx import addnodes
from sphinx.deprecation import RemovedInSphinx20Warning, RemovedInSphinx30Warning
@@ -28,7 +28,7 @@ from sphinx.environment.adapters.toctree import TocTree
from sphinx.errors import SphinxError, BuildEnvironmentError, DocumentError, ExtensionError
from sphinx.locale import __
from sphinx.transforms import SphinxTransformer
from sphinx.util import get_matching_docs, FilenameUniqDict
from sphinx.util import get_matching_docs, DownloadFiles, FilenameUniqDict
from sphinx.util import logging
from sphinx.util.docutils import LoggingReporter
from sphinx.util.i18n import find_catalog_files
@@ -67,7 +67,7 @@ default_settings = {
# or changed to properly invalidate pickle files.
#
# NOTE: increase base version by 2 to have distinct numbers for Py2 and 3
ENV_VERSION = 53 + (sys.version_info[0] - 2)
ENV_VERSION = 54 + (sys.version_info[0] - 2)

# config status
CONFIG_OK = 1
@@ -190,7 +190,8 @@ class BuildEnvironment(object):

        # these map absolute path -> (docnames, unique filename)
        self.images = FilenameUniqDict()      # type: FilenameUniqDict
        self.dlfiles = FilenameUniqDict()     # type: FilenameUniqDict
        self.dlfiles = DownloadFiles()        # type: DownloadFiles
                                              # filename -> (set of docnames, destination)

        # the original URI for images
        self.original_image_uri = {}  # type: Dict[unicode, unicode]
@@ -724,7 +725,7 @@ class BuildEnvironment(object):
    def check_consistency(self):
        # type: () -> None
        """Do consistency checks."""
        included = reduce(lambda x, y: x | y, self.included.values(), set())  # type: Set[unicode]  # NOQA
        included = set().union(*self.included.values())  # type: ignore
        for docname in sorted(self.all_docs):
            if docname not in self.files_to_rebuild:
                if docname == self.config.master_doc:
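Illustrative note (not part of the diff): the replaced reduce() call and the new set().union() idiom compute the same union of per-document include sets; the rewrite just drops the six.moves reduce import. A quick check with made-up data:

    # Both idioms union the values of a dict of sets.
    from functools import reduce

    included = {
        'index': {'intro', 'usage'},
        'usage': {'examples'},
    }

    old_way = reduce(lambda x, y: x | y, included.values(), set())
    new_way = set().union(*included.values())
    assert old_way == new_way == {'intro', 'usage', 'examples'}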
@@ -15,6 +15,7 @@ from six import iteritems
from sphinx import addnodes
from sphinx.locale import __
from sphinx.util import url_re, logging
from sphinx.util.matching import Matcher
from sphinx.util.nodes import clean_astext, process_only_nodes

if False:
@@ -83,6 +84,7 @@ class TocTree(object):
        # interactions between marking and pruning the tree (see bug #1046).

        toctree_ancestors = self.get_toctree_ancestors(docname)
        excluded = Matcher(self.env.config.exclude_patterns)

        def _toctree_add_classes(node, depth):
            # type: (nodes.Node, int) -> None
@@ -172,8 +174,12 @@ class TocTree(object):
                               ref, location=toctreenode)
            except KeyError:
                # this is raised if the included file does not exist
                logger.warning(__('toctree contains reference to nonexisting document %r'),
                               ref, location=toctreenode)
                if excluded(self.env.doc2path(ref, None)):
                    message = __('toctree contains reference to excluded document %r')
                else:
                    message = __('toctree contains reference to nonexisting document %r')

                logger.warning(message, ref, location=toctreenode)
            else:
                # if titles_only is given, only keep the main title and
                # sub-toctrees
@@ -340,7 +340,9 @@ Note: By default this script will not overwrite already created files."""))
    parser.add_argument('-P', '--private', action='store_true',
                        dest='includeprivate',
                        help=__('include "_private" modules'))
    parser.add_argument('-T', '--no-toc', action='store_true', dest='notoc',
    parser.add_argument('--tocfile', action='store', dest='tocfile', default='modules',
                        help=__("don't create a table of contents file"))
    parser.add_argument('-T', '--no-toc', action='store_false', dest='tocfile',
                        help=__("don't create a table of contents file"))
    parser.add_argument('-E', '--no-headings', action='store_true',
                        dest='noheadings',
@@ -453,8 +455,8 @@ def main(argv=sys.argv[1:]):

    if not args.dryrun:
        qs.generate(d, silent=True, overwrite=args.force)
    elif not args.notoc:
        create_modules_toc_file(modules, args)
    elif args.tocfile:
        create_modules_toc_file(modules, args, args.tocfile)

    return 0
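Illustrative note (not part of the diff): --tocfile and -T now share the destination 'tocfile', with -T storing False, so a single truthy check in main() covers both "custom ToC filename" and "no ToC at all". A standalone argparse sketch of that interplay:

    # Demo of two options sharing one dest: -T stores False into args.tocfile.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--tocfile', action='store', dest='tocfile', default='modules')
    parser.add_argument('-T', '--no-toc', action='store_false', dest='tocfile')

    print(parser.parse_args([]).tocfile)                    # 'modules'
    print(parser.parse_args(['--tocfile', 'api']).tocfile)  # 'api'
    print(parser.parse_args(['-T']).tocfile)                # False -> skip ToC file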
@@ -110,6 +110,20 @@ def bool_option(arg):
    return True


def merge_special_members_option(options):
    # type: (Dict) -> None
    """Merge :special-members: option to :members: option."""
    if 'special-members' in options and options['special-members'] is not ALL:
        if options.get('members') is ALL:
            pass
        elif options.get('members'):
            for member in options['special-members']:
                if member not in options['members']:
                    options['members'].append(member)
        else:
            options['members'] = options['special-members']


class AutodocReporter(object):
    """
    A reporter replacement that assigns the correct source name
@@ -823,6 +837,11 @@ class ModuleDocumenter(Documenter):
        'imported-members': bool_option, 'ignore-module-all': bool_option
    }  # type: Dict[unicode, Callable]

    def __init__(self, *args):
        # type: (Any) -> None
        super(ModuleDocumenter, self).__init__(*args)
        merge_special_members_option(self.options)

    @classmethod
    def can_document_member(cls, member, membername, isattr, parent):
        # type: (Any, unicode, bool, Any) -> bool
@@ -1075,6 +1094,11 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
        'private-members': bool_option, 'special-members': members_option,
    }  # type: Dict[unicode, Callable]

    def __init__(self, *args):
        # type: (Any) -> None
        super(ClassDocumenter, self).__init__(*args)
        merge_special_members_option(self.options)

    @classmethod
    def can_document_member(cls, member, membername, isattr, parent):
        # type: (Any, unicode, bool, Any) -> bool
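Illustrative note (not part of the diff): what merge_special_members_option() does to the option dict, reproduced standalone with a local ALL sentinel so it runs without Sphinx; this is the fix for #2401, where an explicit :members: list hid :special-members: entries.

    # Standalone reproduction of the merge logic added above.
    ALL = object()   # stand-in for autodoc's ALL sentinel

    def merge_special_members_option(options):
        """Fold :special-members: entries into :members: (mirrors the diff)."""
        if 'special-members' in options and options['special-members'] is not ALL:
            if options.get('members') is ALL:
                pass                                   # everything is shown anyway
            elif options.get('members'):
                for member in options['special-members']:
                    if member not in options['members']:
                        options['members'].append(member)
            else:
                options['members'] = options['special-members']

    opts = {'members': ['attr'], 'special-members': ['__init__']}
    merge_special_members_option(opts)
    assert opts['members'] == ['attr', '__init__']   # special members now listed too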
@@ -168,13 +168,15 @@ def import_object(modname, objpath, objtype='', attrgetter=safe_getattr, warning

    try:
        module = None
        exc_on_importing = None
        objpath = list(objpath)
        while module is None:
            try:
                module = import_module(modname, warningiserror=warningiserror)
                logger.debug('[autodoc] import %s => %r', modname, module)
            except ImportError:
            except ImportError as exc:
                logger.debug('[autodoc] import %s => failed', modname)
                exc_on_importing = exc
                if '.' in modname:
                    # retry with parent module
                    modname, name = modname.rsplit('.', 1)
@@ -193,6 +195,10 @@ def import_object(modname, objpath, objtype='', attrgetter=safe_getattr, warning
            object_name = attrname
        return [module, parent, object_name, obj]
    except (AttributeError, ImportError) as exc:
        if isinstance(exc, AttributeError) and exc_on_importing:
            # restore ImportError
            exc = exc_on_importing

        if objpath:
            errmsg = ('autodoc: failed to import %s %r from module %r' %
                      (objtype, '.'.join(objpath), modname))
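Illustrative note (not part of the diff): a toy version of the restored-ImportError pattern; when the submodule import failed and autodoc fell back to the parent package, a later AttributeError is swapped back for the original ImportError so the reported cause is the real one. Module and attribute names below are examples only.

    from importlib import import_module

    def import_object(modname, attrname):
        exc_on_importing = None
        try:
            module = import_module(modname)
        except ImportError as exc:
            exc_on_importing = exc                  # remember the real failure
            parent = modname.rsplit('.', 1)[0]      # retry with parent module
            module = import_module(parent)
        try:
            return getattr(module, attrname)
        except AttributeError as exc:
            raise exc_on_importing or exc           # restore ImportError if any

    import_object('json.decoder', 'JSONDecoder')    # works normally
    try:
        import_object('json.nonexistent', 'anything')
    except ImportError as err:
        print('reported as ImportError:', err)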
@@ -81,6 +81,7 @@ from sphinx.util import import_object, rst, logging
from sphinx.util.docutils import (
    NullReporter, SphinxDirective, new_document, switch_source_input
)
from sphinx.util.matching import Matcher

if False:
    # For type annotation
@@ -261,12 +262,17 @@ class Autosummary(SphinxDirective):

        tree_prefix = self.options['toctree'].strip()
        docnames = []
        excluded = Matcher(self.config.exclude_patterns)
        for name, sig, summary, real_name in items:
            docname = posixpath.join(tree_prefix, real_name)
            docname = posixpath.normpath(posixpath.join(dirname, docname))
            if docname not in self.env.found_docs:
                self.warn('toctree references unknown document %r'
                          % docname)
                if excluded(self.env.doc2path(docname, None)):
                    self.warn('toctree references excluded document %r'
                              % docname)
                else:
                    self.warn('toctree references unknown document %r'
                              % docname)
            docnames.append(docname)

        tocnode = addnodes.toctree()
@@ -43,11 +43,12 @@ from sphinx.util.rst import escape as rst_escape

if False:
    # For type annotation
    from typing import Any, Callable, Dict, Tuple, List  # NOQA
    from typing import Any, Callable, Dict, List, Tuple, Type  # NOQA
    from jinja2 import BaseLoader  # NOQA
    from sphinx import addnodes  # NOQA
    from sphinx.builders import Builder  # NOQA
    from sphinx.environment import BuildEnvironment  # NOQA
    from sphinx.ext.autodoc import Documenter  # NOQA


class DummyApplication(object):
@@ -69,7 +70,7 @@ def setup_documenters(app):
        ModuleDocumenter, ClassDocumenter, ExceptionDocumenter, DataDocumenter,
        FunctionDocumenter, MethodDocumenter, AttributeDocumenter,
        InstanceAttributeDocumenter
    ]
    ]  # type: List[Type[Documenter]]
    for documenter in documenters:
        app.registry.add_documenter(documenter.objtype, documenter)
@@ -22,6 +22,7 @@ import warnings
from codecs import BOM_UTF8
from collections import deque
from datetime import datetime
from hashlib import md5
from os import path
from time import mktime, strptime

@@ -167,6 +168,37 @@ class FilenameUniqDict(dict):
        self._existing = state


class DownloadFiles(dict):
    """A special dictionary for download files.

    .. important:: This class would be refactored in nearly future.
                   Hence don't hack this directly.
    """

    def add_file(self, docname, filename):
        # type: (unicode, unicode) -> None
        if filename not in self:
            digest = md5(filename.encode('utf-8')).hexdigest()
            dest = '%s/%s' % (digest, os.path.basename(filename))
            self[filename] = (set(), dest)

        self[filename][0].add(docname)
        return self[filename][1]

    def purge_doc(self, docname):
        # type: (unicode) -> None
        for filename, (docs, dest) in list(self.items()):
            docs.discard(docname)
            if not docs:
                del self[filename]

    def merge_other(self, docnames, other):
        # type: (Set[unicode], Dict[unicode, Tuple[Set[unicode], Any]]) -> None
        for filename, (docs, dest) in other.items():
            for docname in docs & set(docnames):
                self.add_file(docname, filename)


def copy_static_entry(source, targetdir, builder, context={},
                      exclude_matchers=(), level=0):
    # type: (unicode, unicode, Any, Dict, Tuple[Callable, ...], int) -> None
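Illustrative note (not part of the diff): a trimmed, standalone copy of the DownloadFiles bookkeeping to show the effect behind #2720/#4034: download files sharing a basename no longer collide, because each source path gets its own md5-prefixed destination, and entries vanish once no document references them.

    import os
    from hashlib import md5

    class DownloadFiles(dict):
        def add_file(self, docname, filename):
            if filename not in self:
                digest = md5(filename.encode('utf-8')).hexdigest()
                self[filename] = (set(), '%s/%s' % (digest, os.path.basename(filename)))
            self[filename][0].add(docname)
            return self[filename][1]

        def purge_doc(self, docname):
            for filename, (docs, _dest) in list(self.items()):
                docs.discard(docname)
                if not docs:
                    del self[filename]

    dl = DownloadFiles()
    a = dl.add_file('index', 'subdir/img.png')   # invented source paths
    b = dl.add_file('usage', 'other/img.png')
    assert a != b and a.endswith('/img.png') and b.endswith('/img.png')
    dl.purge_doc('index')
    assert 'subdir/img.png' not in dl            # dropped once unreferenced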
@@ -612,7 +612,7 @@ class TexinfoTranslator(nodes.NodeVisitor):
            node_name = node['node_name']
            pointers = tuple([node_name] + self.rellinks[node_name])
            self.body.append('\n@node %s,%s,%s,%s\n' % pointers)  # type: ignore
        for id in self.next_section_ids:
        for id in sorted(self.next_section_ids):
            self.add_anchor(id, node)

        self.next_section_ids.clear()
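Illustrative note (not part of the diff): set iteration order is not stable across Python processes (string hashing is randomized), so emitting anchors straight from the set could produce different texinfo output on each build; sorted() pins the order, which is what #5359 asks for. Tiny demonstration with invented ids:

    # The set may iterate in a different order per process; sorted() is stable.
    next_section_ids = {'markup doc', 'markup id1', 'markup testing-various-markup'}
    print(sorted(next_section_ids))   # always the same order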
@@ -18,7 +18,7 @@ import six
from docutils.statemachine import ViewList
from six import StringIO

from sphinx.ext.autodoc import add_documenter, FunctionDocumenter, ALL  # NOQA
from sphinx.ext.autodoc import add_documenter, FunctionDocumenter, ALL, Options  # NOQA
from sphinx.testing.util import SphinxTestApp, Struct
from sphinx.util import logging

@@ -49,7 +49,7 @@ def setup_test():
    global options, directive
    global processed_docstrings, processed_signatures

    options = Struct(
    options = Options(
        inherited_members = False,
        undoc_members = False,
        private_members = False,
@@ -21,7 +21,7 @@ from six import PY3

from sphinx.ext.autodoc import (
    AutoDirective, ModuleLevelDocumenter, cut_lines, between, ALL,
    merge_autodoc_default_flags
    merge_autodoc_default_flags, Options
)
from sphinx.ext.autodoc.directive import DocumenterBridge, process_documenter_options
from sphinx.testing.util import SphinxTestApp, Struct  # NOQA
@@ -79,7 +79,7 @@ def setup_test():
    global options, directive
    global processed_docstrings, processed_signatures

    options = Struct(
    options = Options(
        inherited_members = False,
        undoc_members = False,
        private_members = False,
@@ -757,6 +757,29 @@ def test_autodoc_imported_members(app):

@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_special_members(app):
    # specific special methods
    options = {"undoc-members": None,
               "special-members": "__init__,__special1__"}
    actual = do_autodoc(app, 'class', 'target.Class', options)
    assert list(filter(lambda l: '::' in l, actual)) == [
        '.. py:class:: Class(arg)',
        '   .. py:method:: Class.__init__(arg)',
        '   .. py:method:: Class.__special1__()',
    ]

    # combination with specific members
    options = {"members": "attr,docattr",
               "undoc-members": None,
               "special-members": "__init__,__special1__"}
    actual = do_autodoc(app, 'class', 'target.Class', options)
    assert list(filter(lambda l: '::' in l, actual)) == [
        '.. py:class:: Class(arg)',
        '   .. py:method:: Class.__init__(arg)',
        '   .. py:method:: Class.__special1__()',
        '   .. py:attribute:: Class.attr',
        '   .. py:attribute:: Class.docattr',
    ]

    # all special methods
    options = {"members": None,
               "undoc-members": None,
@@ -786,33 +809,6 @@ def test_autodoc_special_members(app):
        '   .. py:method:: Class.undocmeth()'
    ]

    # specific special methods
    options = {"members": None,
               "undoc-members": None,
               "special-members": "__init__,__special1__"}
    actual = do_autodoc(app, 'class', 'target.Class', options)
    assert list(filter(lambda l: '::' in l, actual)) == [
        '.. py:class:: Class(arg)',
        '   .. py:method:: Class.__init__(arg)',
        '   .. py:method:: Class.__special1__()',
        '   .. py:attribute:: Class.attr',
        '   .. py:attribute:: Class.descr',
        '   .. py:attribute:: Class.docattr',
        '   .. py:method:: Class.excludemeth()',
        '   .. py:attribute:: Class.inst_attr_comment',
        '   .. py:attribute:: Class.inst_attr_inline',
        '   .. py:attribute:: Class.inst_attr_string',
        '   .. py:attribute:: Class.mdocattr',
        '   .. py:method:: Class.meth()',
        '   .. py:classmethod:: Class.moore(a, e, f) -> happiness',
        '   .. py:attribute:: Class.prop',
        ROGER_METHOD,
        '   .. py:attribute:: Class.skipattr',
        '   .. py:method:: Class.skipmeth()',
        '   .. py:attribute:: Class.udocattr',
        '   .. py:method:: Class.undocmeth()'
    ]


@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_ignore_module_all(app):
@@ -1551,9 +1547,7 @@ def test_autodoc_default_options_with_values(app):
    assert '   .. py:attribute:: EnumCls.val4' not in actual

    # with :special-members:
    # Note that :members: must be *on* for :special-members: to work.
    app.config.autodoc_default_options = {
        'members': None,
        'special-members': '__init__,__iter__',
    }
    actual = do_autodoc(app, 'class', 'target.CustomIter')
@@ -151,7 +151,7 @@ def test_html_warnings(app, warning):
        (".//img[@src='../_images/rimg.png']", ''),
    ],
    'subdir/includes.html': [
        (".//a[@href='../_downloads/img.png']", ''),
        (".//a[@class='reference download internal']", ''),
        (".//img[@src='../_images/img.png']", ''),
        (".//p", 'This is an include file.'),
        (".//pre/span", 'line 1'),
@@ -159,8 +159,7 @@ def test_html_warnings(app, warning):
    ],
    'includes.html': [
        (".//pre", u'Max Strauß'),
        (".//a[@href='_downloads/img.png']", ''),
        (".//a[@href='_downloads/img1.png']", ''),
        (".//a[@class='reference download internal']", ''),
        (".//pre/span", u'"quotes"'),
        (".//pre/span", u"'included'"),
        (".//pre/span[@class='s2']", u'üöä'),
@@ -421,6 +420,31 @@ def test_html_output(app, cached_etree_parse, fname, expect):
    check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)


@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
    'html_context.hckey_co': 'hcval_co'})
@pytest.mark.test_params(shared_result='test_build_html_output')
def test_html_download(app):
    app.build()

    # subdir/includes.html
    result = (app.outdir / 'subdir' / 'includes.html').text()
    pattern = ('<a class="reference download internal" download="" '
               'href="../(_downloads/.*/img.png)">')
    matched = re.search(pattern, result)
    assert matched
    assert (app.outdir / matched.group(1)).exists()
    filename = matched.group(1)

    # includes.html
    result = (app.outdir / 'includes.html').text()
    pattern = ('<a class="reference download internal" download="" '
               'href="(_downloads/.*/img.png)">')
    matched = re.search(pattern, result)
    assert matched
    assert (app.outdir / matched.group(1)).exists()
    assert matched.group(1) == filename


@pytest.mark.sphinx('html', testroot='build-html-translator')
def test_html_translator(app):
    app.build()
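Illustrative note (not part of the diff): what the new test asserts, isolated from the test suite; the download link now points into a digest subdirectory, and the captured relative path is the same on every page that references the file. The HTML snippet and digest below are made up.

    import re

    html = ('<a class="reference download internal" download="" '
            'href="../_downloads/0123abcd/img.png">img.png</a>')
    pattern = ('<a class="reference download internal" download="" '
               'href="../(_downloads/.*/img.png)">')
    m = re.search(pattern, html)
    assert m and m.group(1) == '_downloads/0123abcd/img.png'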
@@ -14,7 +14,9 @@
    :license: BSD, see LICENSE for details.
"""

import re
import xml.etree.cElementTree as ElementTree
from hashlib import md5

import pytest
from html5lib import getTreeBuilder, HTMLParser
@@ -58,7 +60,7 @@ def cached_etree_parse():
        (".//img[@src='../_images/rimg.png']", ''),
    ],
    'subdir/includes.html': [
        (".//a[@href='../_downloads/img.png']", ''),
        (".//a[@class='reference download internal']", ''),
        (".//img[@src='../_images/img.png']", ''),
        (".//p", 'This is an include file.'),
        (".//pre/span", 'line 1'),
@@ -66,8 +68,7 @@ def cached_etree_parse():
    ],
    'includes.html': [
        (".//pre", u'Max Strauß'),
        (".//a[@href='_downloads/img.png']", ''),
        (".//a[@href='_downloads/img1.png']", ''),
        (".//a[@class='reference download internal']", ''),
        (".//pre/span", u'"quotes"'),
        (".//pre/span", u"'included'"),
        (".//pre/span[@class='s2']", u'üöä'),
@@ -323,17 +324,45 @@ def test_html5_output(app, cached_etree_parse, fname, expect):
    check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)


@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
    'html_context.hckey_co': 'hcval_co',
    'html_experimental_html5_writer': True})
@pytest.mark.test_params(shared_result='test_build_html_output')
def test_html_download(app):
    app.build()

    # subdir/includes.html
    result = (app.outdir / 'subdir' / 'includes.html').text()
    pattern = ('<a class="reference download internal" download="" '
               'href="../(_downloads/.*/img.png)">')
    matched = re.search(pattern, result)
    assert matched
    assert (app.outdir / matched.group(1)).exists()
    filename = matched.group(1)

    # includes.html
    result = (app.outdir / 'includes.html').text()
    pattern = ('<a class="reference download internal" download="" '
               'href="(_downloads/.*/img.png)">')
    matched = re.search(pattern, result)
    assert matched
    assert (app.outdir / matched.group(1)).exists()
    assert matched.group(1) == filename


@pytest.mark.sphinx('html', testroot='roles-download',
                    confoverrides={'html_experimental_html5_writer': True})
def test_html_download_role(app, status, warning):
    app.build()
    assert (app.outdir / '_downloads' / 'dummy.dat').exists()
    digest = md5((app.srcdir / 'dummy.dat').encode('utf-8')).hexdigest()
    assert (app.outdir / '_downloads' / digest / 'dummy.dat').exists()

    content = (app.outdir / 'index.html').text()
    assert ('<li><p><a class="reference download internal" download="" '
            'href="_downloads/dummy.dat">'
            '<code class="xref download docutils literal notranslate">'
            '<span class="pre">dummy.dat</span></code></a></p></li>' in content)
    assert (('<li><p><a class="reference download internal" download="" '
             'href="_downloads/%s/dummy.dat">'
             '<code class="xref download docutils literal notranslate">'
             '<span class="pre">dummy.dat</span></code></a></p></li>' % digest)
            in content)
    assert ('<li><p><code class="xref download docutils literal notranslate">'
            '<span class="pre">not_found.dat</span></code></p></li>' in content)
    assert ('<li><p><a class="reference download external" download="" '
@@ -50,6 +50,10 @@ def test_texinfo_warnings(app, status, warning):
def test_texinfo(app, status, warning):
    TexinfoTranslator.ignore_missing_images = True
    app.builder.build_all()
    result = (app.outdir / 'SphinxTests.texi').text(encoding='utf8')
    assert ('@anchor{markup doc}@anchor{12}'
            '@anchor{markup id1}@anchor{13}'
            '@anchor{markup testing-various-markup}@anchor{14}' in result)
    # now, try to run makeinfo over it
    cwd = os.getcwd()
    os.chdir(app.outdir)