Merge branch '1.8'

This commit is contained in:
Takeshi KOMIYA
2018-09-09 19:26:48 +09:00
26 changed files with 299 additions and 69 deletions

CHANGES
View File

@@ -45,16 +45,23 @@ Incompatible changes
* #5282: html theme: refer ``pygments_style`` settings of HTML themes
  preferentially
* The URLs of download files have changed
* #5127: quickstart: ``Makefile`` and ``make.bat`` are not overwritten if they
  already exist

Deprecated
----------

* ``sphinx.io.SphinxI18nReader.set_lineno_for_reporter()`` is deprecated
* ``sphinx.io.SphinxI18nReader.line`` is deprecated
* ``sphinx.util.i18n.find_catalog_source_files()`` has changed; the
  *gettext_compact* argument has been deprecated

Features added
--------------

* #5388: Ensure frozen object descriptions are reproducible
* #5362: apidoc: Add ``--tocfile`` option to change the filename of the ToC

Bugs fixed
----------
@@ -67,6 +74,12 @@ Bugs fixed
* #5282: html theme: ``pygments_style`` of theme was overridden by ``conf.py``
  by default
* #4379: toctree shows a confusing warning when a document is excluded
* #2401: autodoc: ``:members:`` causes ``:special-members:`` not to be shown
* autodoc: ``ImportError`` is replaced by ``AttributeError`` when a submodule
  fails to import
* #2720, #4034: Incorrect links with ``:download:``, duplicate names, and
  parallel builds
* #5290: autodoc: failed to analyze source code in an egg package
* #5399: Sphinx crashes if an unknown po file exists

Testing
-------

View File

@@ -136,6 +136,12 @@ The following is a list of deprecated interface.
     - 4.0
     - :confval:`autodoc_default_options`

   * - ``gettext_compact`` argument of
       ``sphinx.util.i18n.find_catalog_source_files()``
     - 1.8
     - 3.0
     - N/A

   * - ``sphinx.io.SphinxI18nReader.set_lineno_for_reporter()``
     - 1.8
     - 3.0

View File

@@ -58,6 +58,10 @@ Options
   Maximum depth for the generated table of contents file.

.. option:: --tocfile

   Filename for a table of contents file. Defaults to ``modules``.

.. option:: -T, --no-toc

   Do not create a table of contents file. Ignored when :option:`--full` is
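
The new option can be exercised roughly as in this sketch; the package and output paths are placeholders, not part of the change:

# Rough sketch of invoking sphinx-apidoc with the new --tocfile option.
# 'mypackage' and 'docs/api' are placeholder paths.
import subprocess

subprocess.run(
    ['sphinx-apidoc', '--tocfile', 'api', '-o', 'docs/api', 'mypackage'],
    check=True,
)
# Writes docs/api/api.rst as the table of contents instead of modules.rst.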

View File

@@ -241,7 +241,6 @@ class Builder(object):
[path.join(self.srcdir, x) for x in self.config.locale_dirs],
self.config.language,
charset=self.config.source_encoding,
gettext_compact=self.config.gettext_compact,
force_all=True,
excluded=Matcher(['**/.?**']))
message = __('all of %d po files') % len(catalogs)
@@ -264,7 +263,6 @@ class Builder(object):
self.config.language,
domains=list(specified_domains),
charset=self.config.source_encoding,
gettext_compact=self.config.gettext_compact,
excluded=Matcher(['**/.?**']))
message = __('targets for %d po files that are specified') % len(catalogs)
self.compile_catalogs(catalogs, message)
@@ -275,7 +273,6 @@ class Builder(object):
[path.join(self.srcdir, x) for x in self.config.locale_dirs],
self.config.language,
charset=self.config.source_encoding,
gettext_compact=self.config.gettext_compact,
excluded=Matcher(['**/.?**']))
message = __('targets for %d po files that are out of date') % len(catalogs)
self.compile_catalogs(catalogs, message)

View File

@@ -817,10 +817,10 @@ class StandaloneHTMLBuilder(Builder):
for src in status_iterator(self.env.dlfiles, __('copying downloadable files... '),
"brown", len(self.env.dlfiles), self.app.verbosity,
stringify_func=to_relpath):
dest = self.env.dlfiles[src][1]
try:
copyfile(path.join(self.srcdir, src),
path.join(self.outdir, '_downloads', dest))
dest = path.join(self.outdir, '_downloads', self.env.dlfiles[src][1])
ensuredir(path.dirname(dest))
copyfile(path.join(self.srcdir, src), dest)
except EnvironmentError as err:
logger.warning(__('cannot copy downloadable file %r: %s'),
path.join(self.srcdir, src), err)
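
For reference, a minimal sketch of the digest-based destination layout the new code produces; the paths are illustrative only:

# Minimal sketch of the md5-digest layout for download files; 'outdir' and
# 'filename' are illustrative values, not Sphinx API.
import os
from hashlib import md5

filename = '/project/docs/img.png'
outdir = '/project/docs/_build/html'
digest = md5(filename.encode('utf-8')).hexdigest()
dest = os.path.join(outdir, '_downloads', digest, os.path.basename(filename))
os.makedirs(os.path.dirname(dest), exist_ok=True)
print(dest)  # .../_downloads/<digest>/img.png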

View File

@@ -678,7 +678,7 @@ def main(argv=sys.argv[1:]):
except ValueError:
print(__('Invalid template variable: %s') % variable)
generate(d, templatedir=args.templatedir)
generate(d, overwrite=False, templatedir=args.templatedir)
return 0

View File

@@ -25,7 +25,7 @@ from sphinx.environment.adapters.toctree import TocTree
from sphinx.errors import SphinxError, BuildEnvironmentError, DocumentError, ExtensionError
from sphinx.locale import __
from sphinx.transforms import SphinxTransformer
from sphinx.util import get_matching_docs, FilenameUniqDict
from sphinx.util import get_matching_docs, DownloadFiles, FilenameUniqDict
from sphinx.util import logging
from sphinx.util.docutils import LoggingReporter
from sphinx.util.i18n import find_catalog_files
@@ -184,7 +184,8 @@ class BuildEnvironment(object):
# these map absolute path -> (docnames, unique filename)
self.images = FilenameUniqDict() # type: FilenameUniqDict
self.dlfiles = FilenameUniqDict() # type: FilenameUniqDict
self.dlfiles = DownloadFiles() # type: DownloadFiles
# filename -> (set of docnames, destination)
# the original URI for images
self.original_image_uri = {} # type: Dict[unicode, unicode]

View File

@@ -340,7 +340,9 @@ Note: By default this script will not overwrite already created files."""))
parser.add_argument('-P', '--private', action='store_true',
dest='includeprivate',
help=__('include "_private" modules'))
parser.add_argument('-T', '--no-toc', action='store_true', dest='notoc',
parser.add_argument('--tocfile', action='store', dest='tocfile', default='modules',
help=__("don't create a table of contents file"))
parser.add_argument('-T', '--no-toc', action='store_false', dest='tocfile',
help=__("don't create a table of contents file"))
parser.add_argument('-E', '--no-headings', action='store_true',
dest='noheadings',
@@ -453,8 +455,8 @@ def main(argv=sys.argv[1:]):
if not args.dryrun:
qs.generate(d, silent=True, overwrite=args.force)
elif not args.notoc:
create_modules_toc_file(modules, args)
elif args.tocfile:
create_modules_toc_file(modules, args, args.tocfile)
return 0
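
A self-contained sketch (not Sphinx code) of how ``-T`` and ``--tocfile`` can share one destination in argparse, which is what the change above relies on:

# Standalone sketch: '-T' writes False into the same 'tocfile' destination
# that '--tocfile' defaults to 'modules'.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--tocfile', action='store', dest='tocfile', default='modules')
parser.add_argument('-T', '--no-toc', action='store_false', dest='tocfile')

print(parser.parse_args([]).tocfile)                     # 'modules'
print(parser.parse_args(['--tocfile', 'api']).tocfile)   # 'api'
print(parser.parse_args(['-T']).tocfile)                 # False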

View File

@@ -106,6 +106,20 @@ def bool_option(arg):
return True
def merge_special_members_option(options):
# type: (Dict) -> None
"""Merge :special-members: option to :members: option."""
if 'special-members' in options and options['special-members'] is not ALL:
if options.get('members') is ALL:
pass
elif options.get('members'):
for member in options['special-members']:
if member not in options['members']:
options['members'].append(member)
else:
options['members'] = options['special-members']
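
A rough illustration of how the merge above behaves; plain dicts stand in for autodoc's real Options object:

# Rough illustration of merge_special_members_option(); plain dicts stand in
# for autodoc's Options object.
options = {'members': ['attr', 'docattr'],
           'special-members': ['__init__', '__special1__']}
merge_special_members_option(options)
print(options['members'])  # ['attr', 'docattr', '__init__', '__special1__']

options = {'special-members': ['__init__']}
merge_special_members_option(options)
print(options['members'])  # ['__init__']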
# Some useful event listener factories for autodoc-process-docstring.
def cut_lines(pre, post=0, what=None):
@@ -768,6 +782,11 @@ class ModuleDocumenter(Documenter):
'imported-members': bool_option, 'ignore-module-all': bool_option
} # type: Dict[unicode, Callable]
def __init__(self, *args):
# type: (Any) -> None
super(ModuleDocumenter, self).__init__(*args)
merge_special_members_option(self.options)
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
# type: (Any, unicode, bool, Any) -> bool
@@ -1026,6 +1045,11 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
'private-members': bool_option, 'special-members': members_option,
} # type: Dict[unicode, Callable]
def __init__(self, *args):
# type: (Any) -> None
super(ClassDocumenter, self).__init__(*args)
merge_special_members_option(self.options)
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
# type: (Any, unicode, bool, Any) -> bool

View File

@@ -168,13 +168,15 @@ def import_object(modname, objpath, objtype='', attrgetter=safe_getattr, warning
try:
module = None
exc_on_importing = None
objpath = list(objpath)
while module is None:
try:
module = import_module(modname, warningiserror=warningiserror)
logger.debug('[autodoc] import %s => %r', modname, module)
except ImportError:
except ImportError as exc:
logger.debug('[autodoc] import %s => failed', modname)
exc_on_importing = exc
if '.' in modname:
# retry with parent module
modname, name = modname.rsplit('.', 1)
@@ -193,6 +195,10 @@ def import_object(modname, objpath, objtype='', attrgetter=safe_getattr, warning
object_name = attrname
return [module, parent, object_name, obj]
except (AttributeError, ImportError) as exc:
if isinstance(exc, AttributeError) and exc_on_importing:
# restore ImportError
exc = exc_on_importing
if objpath:
errmsg = ('autodoc: failed to import %s %r from module %r' %
(objtype, '.'.join(objpath), modname))
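
The retry-and-restore pattern above, reduced to a standalone sketch; the module path is made up:

# Standalone sketch of the pattern above: if 'pkg.mod' fails to import, retry
# the parent package and resolve the rest via getattr, but remember the
# original ImportError so a later AttributeError does not hide it.
from importlib import import_module

modname, objpath = 'pkg.mod', []          # 'pkg.mod' is a made-up module path
module = exc_on_importing = None
try:
    while module is None:
        try:
            module = import_module(modname)
        except ImportError as exc:
            exc_on_importing = exc
            if '.' not in modname:
                raise
            modname, name = modname.rsplit('.', 1)
            objpath.insert(0, name)
    obj = module
    for attrname in objpath:
        obj = getattr(obj, attrname)      # may raise AttributeError
except (AttributeError, ImportError) as exc:
    if isinstance(exc, AttributeError) and exc_on_importing:
        exc = exc_on_importing            # report the import failure instead
    print('failed to import %s: %r' % ('.'.join([modname] + objpath), exc))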

View File

@@ -43,11 +43,12 @@ from sphinx.util.rst import escape as rst_escape
if False:
# For type annotation
from typing import Any, Callable, Dict, Tuple, List # NOQA
from typing import Any, Callable, Dict, List, Tuple, Type # NOQA
from jinja2 import BaseLoader # NOQA
from sphinx import addnodes # NOQA
from sphinx.builders import Builder # NOQA
from sphinx.environment import BuildEnvironment # NOQA
from sphinx.ext.autodoc import Documenter # NOQA
class DummyApplication(object):
@@ -69,7 +70,7 @@ def setup_documenters(app):
ModuleDocumenter, ClassDocumenter, ExceptionDocumenter, DataDocumenter,
FunctionDocumenter, MethodDocumenter, AttributeDocumenter,
InstanceAttributeDocumenter
]
] # type: List[Type[Documenter]]
for documenter in documenters:
app.registry.add_documenter(documenter.objtype, documenter)

View File

@@ -10,6 +10,9 @@
""" """
from __future__ import print_function from __future__ import print_function
import re
from zipfile import ZipFile
from six import iteritems, BytesIO, StringIO from six import iteritems, BytesIO, StringIO
from sphinx.errors import PycodeError from sphinx.errors import PycodeError
@@ -42,9 +45,23 @@ class ModuleAnalyzer(object):
obj = cls(f, modname, filename) # type: ignore
cls.cache['file', filename] = obj
except Exception as err:
raise PycodeError('error opening %r' % filename, err)
if '.egg/' in filename:
obj = cls.cache['file', filename] = cls.for_egg(filename, modname)
else:
raise PycodeError('error opening %r' % filename, err)
return obj
@classmethod
def for_egg(cls, filename, modname):
# type: (unicode, unicode) -> ModuleAnalyzer
eggpath, relpath = re.split('(?<=\\.egg)/', filename)
try:
with ZipFile(eggpath) as egg:
code = egg.read(relpath).decode('utf-8')
return cls.for_string(code, modname, filename)
except Exception as exc:
raise PycodeError('error opening %r' % filename, exc)
@classmethod
def for_module(cls, modname):
# type: (str) -> ModuleAnalyzer
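
A quick standalone look at the ``.egg`` path split that ``for_egg()`` relies on; the path is made up:

# Standalone look at the path split used by for_egg(); the egg path is made
# up. The lookbehind keeps the '.egg' suffix on the archive part.
import re

filename = '/venv/lib/sample-0.0.0-py3.7.egg/sample.py'
eggpath, relpath = re.split('(?<=\\.egg)/', filename)
print(eggpath)  # /venv/lib/sample-0.0.0-py3.7.egg
print(relpath)  # sample.py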

View File

@@ -22,6 +22,7 @@ import warnings
from codecs import BOM_UTF8
from collections import deque
from datetime import datetime
from hashlib import md5
from os import path
from time import mktime, strptime
@@ -167,6 +168,37 @@ class FilenameUniqDict(dict):
self._existing = state
class DownloadFiles(dict):
"""A special dictionary for download files.
.. important:: This class will be refactored in the near future.
Hence, don't hack it directly.
"""
def add_file(self, docname, filename):
# type: (unicode, unicode) -> unicode
if filename not in self:
digest = md5(filename.encode('utf-8')).hexdigest()
dest = '%s/%s' % (digest, os.path.basename(filename))
self[filename] = (set(), dest)
self[filename][0].add(docname)
return self[filename][1]
def purge_doc(self, docname):
# type: (unicode) -> None
for filename, (docs, dest) in list(self.items()):
docs.discard(docname)
if not docs:
del self[filename]
def merge_other(self, docnames, other):
# type: (Set[unicode], Dict[unicode, Tuple[Set[unicode], Any]]) -> None
for filename, (docs, dest) in other.items():
for docname in docs & set(docnames):
self.add_file(docname, filename)
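
A short usage sketch of the class added above (it is importable as ``sphinx.util.DownloadFiles`` after this change); the docnames and paths are invented:

# Usage sketch of DownloadFiles; docnames and file paths are invented.
from sphinx.util import DownloadFiles

dlfiles = DownloadFiles()
dest = dlfiles.add_file('index', '/project/docs/img.png')
print(dest)  # '<md5 of the path>/img.png'
dlfiles.add_file('usage', '/project/docs/img.png')  # second document, same file
dlfiles.purge_doc('index')
print('/project/docs/img.png' in dlfiles)  # True, 'usage' still references it
dlfiles.purge_doc('usage')
print('/project/docs/img.png' in dlfiles)  # False, no documents are left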
def copy_static_entry(source, targetdir, builder, context={},
exclude_matchers=(), level=0):
# type: (unicode, unicode, Any, Dict, Tuple[Callable, ...], int) -> None
@@ -282,6 +314,11 @@ def get_module_source(modname):
filename += 'w'
elif not (lfilename.endswith('.py') or lfilename.endswith('.pyw')):
raise PycodeError('source is not a .py file: %r' % filename)
elif '.egg' in filename:
eggpath, _ = re.split('(?<=\\.egg)/', filename)
if path.isfile(eggpath):
return 'file', filename
if not path.isfile(filename):
raise PycodeError('source file is not present: %r' % filename)
return 'file', filename

View File

@@ -12,6 +12,7 @@ import gettext
import io
import os
import re
import warnings
from collections import namedtuple
from datetime import datetime
from os import path
@@ -20,6 +21,7 @@ import babel.dates
from babel.messages.mofile import write_mo
from babel.messages.pofile import read_po
from sphinx.deprecation import RemovedInSphinx30Warning
from sphinx.errors import SphinxError
from sphinx.locale import __
from sphinx.util import logging
@@ -103,7 +105,7 @@ def find_catalog_files(docname, srcdir, locale_dirs, lang, compaction):
return files # type: ignore
def find_catalog_source_files(locale_dirs, locale, domains=None, gettext_compact=False,
def find_catalog_source_files(locale_dirs, locale, domains=None, gettext_compact=None,
charset='utf-8', force_all=False,
excluded=Matcher([])):
# type: (List[unicode], unicode, List[unicode], bool, unicode, bool, Matcher) -> Set[CatalogInfo] # NOQA
@@ -115,14 +117,15 @@ def find_catalog_source_files(locale_dirs, locale, domains=None, gettext_compact
:param str locale: a language as `'en'`
:param list domains: list of domain names to get. If empty list or None
is specified, get all domain names. default is None.
:param boolean gettext_compact:
* False: keep domains directory structure (default).
* True: domains in the sub directory will be merged into 1 file.
:param boolean force_all:
Set True if you want to get all catalogs rather than updated catalogs.
default is False.
:return: [CatalogInfo(), ...]
"""
if gettext_compact is not None:
warnings.warn('gettext_compact argument for find_catalog_source_files() '
'is deprecated.', RemovedInSphinx30Warning)
catalogs = set() # type: Set[CatalogInfo]
if not locale:
@@ -143,10 +146,7 @@ def find_catalog_source_files(locale_dirs, locale, domains=None, gettext_compact
if excluded(path.join(relpath(dirpath, base_dir), filename)):
continue
base = path.splitext(filename)[0]
domain = relpath(path.join(dirpath, base), base_dir)
if gettext_compact and path.sep in domain:
domain = path.split(domain)[0]
domain = domain.replace(path.sep, SEP)
domain = relpath(path.join(dirpath, base), base_dir).replace(path.sep, SEP)
if domains and domain not in domains:
continue
cat = CatalogInfo(base_dir, domain, charset)
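
What the deprecation path above looks like to a caller, as a small sketch; the locale directory is a placeholder and need not exist:

# Sketch of the deprecation warning added above; 'locale' is a placeholder
# directory and may not exist.
import warnings

from sphinx.util.i18n import find_catalog_source_files

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    find_catalog_source_files(['locale'], 'de', gettext_compact=True)

print([str(w.message) for w in caught])
# ['gettext_compact argument for find_catalog_source_files() is deprecated.']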

View File

@@ -18,7 +18,7 @@ import six
from docutils.statemachine import ViewList
from six import StringIO
from sphinx.ext.autodoc import FunctionDocumenter, ALL
from sphinx.ext.autodoc import FunctionDocumenter, ALL, Options
from sphinx.testing.util import SphinxTestApp, Struct
from sphinx.util import logging
from sphinx.util import save_traceback # NOQA
@@ -50,7 +50,7 @@ def setup_test():
global options, directive
global processed_docstrings, processed_signatures
options = Struct(
options = Options(
inherited_members = False,
undoc_members = False,
private_members = False,

View File

@@ -0,0 +1,8 @@
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('sample-0.0.0-py3.7.egg'))
master_doc = 'index'
extensions = ['sphinx.ext.autodoc']

View File

@@ -0,0 +1,2 @@
test-pycode-egg
===============

Binary file not shown.

View File

@@ -0,0 +1,8 @@
# -*- coding: utf-8 -*-
#: constant on sample.py
CONSTANT = 1
def hello(s):
print('Hello %s' % s)

View File

@@ -0,0 +1,6 @@
# -*- coding: utf-8 -*-
from setuptools import setup
setup(name='sample',
py_modules=['sample'])
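
For context, the egg fixture referenced above can be rebuilt from this setup script, roughly as follows (a sketch, not part of the commit):

# Sketch: rebuilding the sample egg from the setup.py above; run it from the
# fixture directory. Requires setuptools.
import subprocess
import sys

subprocess.run([sys.executable, 'setup.py', 'bdist_egg'], check=True)
# The egg is written to ./dist/, e.g. dist/sample-0.0.0-py3.7.egg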

View File

@@ -21,7 +21,7 @@ from six import PY3
from sphinx.ext.autodoc import (
ModuleLevelDocumenter, cut_lines, between, ALL,
merge_autodoc_default_flags
merge_autodoc_default_flags, Options
)
from sphinx.ext.autodoc.directive import DocumenterBridge, process_documenter_options
from sphinx.testing.util import SphinxTestApp, Struct # NOQA
@@ -79,7 +79,7 @@ def setup_test():
global options, directive
global processed_docstrings, processed_signatures
options = Struct(
options = Options(
inherited_members = False,
undoc_members = False,
private_members = False,
@@ -757,6 +757,29 @@ def test_autodoc_imported_members(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_special_members(app):
# specific special methods
options = {"undoc-members": None,
"special-members": "__init__,__special1__"}
actual = do_autodoc(app, 'class', 'target.Class', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.__init__(arg)',
' .. py:method:: Class.__special1__()',
]
# combination with specific members
options = {"members": "attr,docattr",
"undoc-members": None,
"special-members": "__init__,__special1__"}
actual = do_autodoc(app, 'class', 'target.Class', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.__init__(arg)',
' .. py:method:: Class.__special1__()',
' .. py:attribute:: Class.attr',
' .. py:attribute:: Class.docattr',
]
# all special methods
options = {"members": None,
"undoc-members": None,
@@ -786,33 +809,6 @@ def test_autodoc_special_members(app):
' .. py:method:: Class.undocmeth()'
]
# specific special methods
options = {"members": None,
"undoc-members": None,
"special-members": "__init__,__special1__"}
actual = do_autodoc(app, 'class', 'target.Class', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.__init__(arg)',
' .. py:method:: Class.__special1__()',
' .. py:attribute:: Class.attr',
' .. py:attribute:: Class.descr',
' .. py:attribute:: Class.docattr',
' .. py:method:: Class.excludemeth()',
' .. py:attribute:: Class.inst_attr_comment',
' .. py:attribute:: Class.inst_attr_inline',
' .. py:attribute:: Class.inst_attr_string',
' .. py:attribute:: Class.mdocattr',
' .. py:method:: Class.meth()',
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
' .. py:attribute:: Class.prop',
ROGER_METHOD,
' .. py:attribute:: Class.skipattr',
' .. py:method:: Class.skipmeth()',
' .. py:attribute:: Class.udocattr',
' .. py:method:: Class.undocmeth()'
]
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_ignore_module_all(app):
@@ -1551,9 +1547,7 @@ def test_autodoc_default_options_with_values(app):
assert ' .. py:attribute:: EnumCls.val4' not in actual
# with :special-members:
# Note that :members: must be *on* for :special-members: to work.
app.config.autodoc_default_options = {
'members': None,
'special-members': '__init__,__iter__',
}
actual = do_autodoc(app, 'class', 'target.CustomIter')
@@ -1590,3 +1584,26 @@ def test_autodoc_default_options_with_values(app):
assert ' list of weak references to the object (if defined)' not in actual
assert ' .. py:method:: CustomIter.snafucate()' not in actual
assert ' Makes this snafucated.' not in actual
@pytest.mark.sphinx('html', testroot='pycode-egg')
def test_autodoc_for_egged_code(app):
options = {"members": None,
"undoc-members": None}
actual = do_autodoc(app, 'module', 'sample', options)
assert list(actual) == [
'',
'.. py:module:: sample',
'',
'',
'.. py:data:: CONSTANT',
' :module: sample',
' :annotation: = 1',
'',
' constant on sample.py',
' ',
'',
'.. py:function:: hello(s)',
' :module: sample',
''
]

View File

@@ -370,6 +370,8 @@ def test_html_download_role(app, status, warning):
'/_static/sphinxheader.png]</span></p></li>' in content)
@pytest.mark.skipif('DO_EPUBCHECK' not in os.environ,
reason='Skipped because DO_EPUBCHECK is not set')
@pytest.mark.sphinx('epub')
def test_run_epubcheck(app):
app.build()

View File

@@ -151,7 +151,7 @@ def test_html_warnings(app, warning):
(".//img[@src='../_images/rimg.png']", ''), (".//img[@src='../_images/rimg.png']", ''),
], ],
'subdir/includes.html': [ 'subdir/includes.html': [
(".//a[@href='../_downloads/img.png']", ''), (".//a[@class='reference download internal']", ''),
(".//img[@src='../_images/img.png']", ''), (".//img[@src='../_images/img.png']", ''),
(".//p", 'This is an include file.'), (".//p", 'This is an include file.'),
(".//pre/span", 'line 1'), (".//pre/span", 'line 1'),
@@ -159,8 +159,7 @@ def test_html_warnings(app, warning):
],
'includes.html': [
(".//pre", u'Max Strauß'),
(".//a[@href='_downloads/img.png']", ''),
(".//a[@href='_downloads/img1.png']", ''),
(".//a[@class='reference download internal']", ''),
(".//pre/span", u'"quotes"'),
(".//pre/span", u"'included'"),
(".//pre/span[@class='s2']", u'üöä'),
@@ -421,6 +420,31 @@ def test_html_output(app, cached_etree_parse, fname, expect):
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
'html_context.hckey_co': 'hcval_co'})
@pytest.mark.test_params(shared_result='test_build_html_output')
def test_html_download(app):
app.build()
# subdir/includes.html
result = (app.outdir / 'subdir' / 'includes.html').text()
pattern = ('<a class="reference download internal" download="" '
'href="../(_downloads/.*/img.png)">')
matched = re.search(pattern, result)
assert matched
assert (app.outdir / matched.group(1)).exists()
filename = matched.group(1)
# includes.html
result = (app.outdir / 'includes.html').text()
pattern = ('<a class="reference download internal" download="" '
'href="(_downloads/.*/img.png)">')
matched = re.search(pattern, result)
assert matched
assert (app.outdir / matched.group(1)).exists()
assert matched.group(1) == filename
@pytest.mark.sphinx('html', testroot='build-html-translator')
def test_html_translator(app):
app.build()

View File

@@ -14,7 +14,9 @@
:license: BSD, see LICENSE for details.
"""
import re
import xml.etree.cElementTree as ElementTree
from hashlib import md5
import pytest
from html5lib import getTreeBuilder, HTMLParser
@@ -58,7 +60,7 @@ def cached_etree_parse():
(".//img[@src='../_images/rimg.png']", ''), (".//img[@src='../_images/rimg.png']", ''),
], ],
'subdir/includes.html': [ 'subdir/includes.html': [
(".//a[@href='../_downloads/img.png']", ''), (".//a[@class='reference download internal']", ''),
(".//img[@src='../_images/img.png']", ''), (".//img[@src='../_images/img.png']", ''),
(".//p", 'This is an include file.'), (".//p", 'This is an include file.'),
(".//pre/span", 'line 1'), (".//pre/span", 'line 1'),
@@ -66,8 +68,7 @@ def cached_etree_parse():
],
'includes.html': [
(".//pre", u'Max Strauß'),
(".//a[@href='_downloads/img.png']", ''),
(".//a[@href='_downloads/img1.png']", ''),
(".//a[@class='reference download internal']", ''),
(".//pre/span", u'"quotes"'),
(".//pre/span", u"'included'"),
(".//pre/span[@class='s2']", u'üöä'),
@@ -323,17 +324,45 @@ def test_html5_output(app, cached_etree_parse, fname, expect):
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
'html_context.hckey_co': 'hcval_co',
'html_experimental_html5_writer': True})
@pytest.mark.test_params(shared_result='test_build_html_output')
def test_html_download(app):
app.build()
# subdir/includes.html
result = (app.outdir / 'subdir' / 'includes.html').text()
pattern = ('<a class="reference download internal" download="" '
'href="../(_downloads/.*/img.png)">')
matched = re.search(pattern, result)
assert matched
assert (app.outdir / matched.group(1)).exists()
filename = matched.group(1)
# includes.html
result = (app.outdir / 'includes.html').text()
pattern = ('<a class="reference download internal" download="" '
'href="(_downloads/.*/img.png)">')
matched = re.search(pattern, result)
assert matched
assert (app.outdir / matched.group(1)).exists()
assert matched.group(1) == filename
@pytest.mark.sphinx('html', testroot='roles-download',
confoverrides={'html_experimental_html5_writer': True})
def test_html_download_role(app, status, warning):
app.build()
assert (app.outdir / '_downloads' / 'dummy.dat').exists()
digest = md5((app.srcdir / 'dummy.dat').encode('utf-8')).hexdigest()
assert (app.outdir / '_downloads' / digest / 'dummy.dat').exists()
content = (app.outdir / 'index.html').text()
assert ('<li><p><a class="reference download internal" download="" '
assert (('<li><p><a class="reference download internal" download="" '
'href="_downloads/dummy.dat">'
'href="_downloads/%s/dummy.dat">'
'<code class="xref download docutils literal notranslate">'
'<span class="pre">dummy.dat</span></code></a></p></li>' in content)
'<span class="pre">dummy.dat</span></code></a></p></li>' % digest)
in content)
assert ('<li><p><code class="xref download docutils literal notranslate">'
'<span class="pre">not_found.dat</span></code></p></li>' in content)
assert ('<li><p><a class="reference download external" download="" '

View File

@@ -10,6 +10,7 @@
""" """
import os import os
import sys
from six import PY2
@@ -47,6 +48,31 @@ def test_ModuleAnalyzer_for_module():
assert analyzer.encoding == 'utf-8'
def test_ModuleAnalyzer_for_file_in_egg(rootdir):
try:
path = rootdir / 'test-pycode-egg' / 'sample-0.0.0-py3.7.egg'
sys.path.insert(0, path)
import sample
analyzer = ModuleAnalyzer.for_file(sample.__file__, 'sample')
docs = analyzer.find_attr_docs()
assert docs == {('', 'CONSTANT'): ['constant on sample.py', '']}
finally:
sys.path.pop(0)
def test_ModuleAnalyzer_for_module_in_egg(rootdir):
try:
path = rootdir / 'test-pycode-egg' / 'sample-0.0.0-py3.7.egg'
sys.path.insert(0, path)
analyzer = ModuleAnalyzer.for_module('sample')
docs = analyzer.find_attr_docs()
assert docs == {('', 'CONSTANT'): ['constant on sample.py', '']}
finally:
sys.path.pop(0)
def test_ModuleAnalyzer_find_tags():
code = ('class Foo(object):\n' # line: 1
' """class Foo!"""\n'

View File

@@ -154,7 +154,7 @@ def test_get_catalogs_with_compact(tempdir):
catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', gettext_compact=True)
domains = set(c.domain for c in catalogs)
assert domains == set(['test1', 'test2', 'sub'])
assert domains == set(['test1', 'test2', 'sub/test3', 'sub/test4'])
def test_get_catalogs_excluded(tempdir):