Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

commit 75477633b6: Merge branch '2.0'

CHANGES
@@ -54,6 +54,8 @@ Features added
   :confval:`html_extra_path` directories are inside output directory
 * #6514: html: Add a label to search input for accessibility purposes
 * #5602: apidoc: Add ``--templatedir`` option
+* #6475: Add ``override`` argument to ``app.add_autodocumenter()``
+* #6533: LaTeX: refactor visit_enumerated_list() to use ``\sphinxsetlistlabels``
 
 Bugs fixed
 ----------
@@ -67,6 +69,9 @@ Bugs fixed
 * #6486: UnboundLocalError is raised if broken extension installed
 * #6498: autosummary: crashed with wrong autosummary_generate setting
 * #6507: autosummary: crashes without autosummary_generate setting
+* #6511: LaTeX: autonumbered list cannot be customized in LaTeX
+  since Sphinx 1.8.0 (refs: #6533)
+* #6531: Failed to load last environment object when extension added
 
 Testing
 --------
sphinx/application.py
@@ -990,8 +990,8 @@ class Sphinx:
         else:
             lexer_classes[alias] = lexer
 
-    def add_autodocumenter(self, cls):
-        # type: (Any) -> None
+    def add_autodocumenter(self, cls, override=False):
+        # type: (Any, bool) -> None
         """Register a new documenter class for the autodoc extension.
 
         Add *cls* as a new documenter class for the :mod:`sphinx.ext.autodoc`
@@ -1003,11 +1003,13 @@ class Sphinx:
         .. todo:: Add real docs for Documenter and subclassing
 
         .. versionadded:: 0.6
+        .. versionchanged:: 2.2
+           Add *override* keyword.
         """
         logger.debug('[app] adding autodocumenter: %r', cls)
         from sphinx.ext.autodoc.directive import AutodocDirective
         self.registry.add_documenter(cls.objtype, cls)
-        self.add_directive('auto' + cls.objtype, AutodocDirective)
+        self.add_directive('auto' + cls.objtype, AutodocDirective, override=override)
 
     def add_autodoc_attrgetter(self, typ, getter):
         # type: (Type, Callable[[Any, str, Any], Any]) -> None
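A note on the hunk above: with the new ``override`` flag, an extension can swap in its own documenter for an existing auto-directive without triggering the duplicate-directive warning. A minimal sketch of a caller (the ``MyClassDocumenter`` subclass is hypothetical, not part of this commit):

    from sphinx.ext.autodoc import ClassDocumenter

    class MyClassDocumenter(ClassDocumenter):
        objtype = 'class'                          # reuse the 'autoclass' name
        priority = ClassDocumenter.priority + 10   # preferred over the default

    def setup(app):
        # override=True is forwarded to add_directive(), so the existing
        # 'autoclass' registration is replaced instead of warned about
        app.add_autodocumenter(MyClassDocumenter, override=True)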
sphinx/builders/html.py
@@ -38,8 +38,7 @@ from sphinx.highlighting import PygmentsBridge
 from sphinx.locale import _, __
 from sphinx.search import js_index
 from sphinx.theming import HTMLThemeFactory
-from sphinx.util import logging, status_iterator
-from sphinx.util.console import bold  # type: ignore
+from sphinx.util import logging, progress_message, status_iterator
 from sphinx.util.docutils import is_html5_writer_available, new_document
 from sphinx.util.fileutil import copy_asset
 from sphinx.util.i18n import format_date
@@ -597,6 +596,7 @@ class StandaloneHTMLBuilder(Builder):
 
     def finish(self) -> None:
         self.finish_tasks.add_task(self.gen_indices)
+        self.finish_tasks.add_task(self.gen_pages_from_extensions)
         self.finish_tasks.add_task(self.gen_additional_pages)
         self.finish_tasks.add_task(self.copy_image_files)
         self.finish_tasks.add_task(self.copy_download_files)
@@ -607,9 +607,8 @@ class StandaloneHTMLBuilder(Builder):
         # dump the search index
         self.handle_finish()
 
+    @progress_message(__('generating indices'))
     def gen_indices(self) -> None:
-        logger.info(bold(__('generating indices...')), nonl=True)
-
         # the global general index
         if self.use_index:
             self.write_genindex()
@@ -617,16 +616,14 @@ class StandaloneHTMLBuilder(Builder):
         # the global domain-specific indices
         self.write_domain_indices()
 
-        logger.info('')
-
-    def gen_additional_pages(self) -> None:
+    def gen_pages_from_extensions(self) -> None:
         # pages from extensions
         for pagelist in self.events.emit('html-collect-pages'):
             for pagename, context, template in pagelist:
                 self.handle_page(pagename, context, template)
 
-        logger.info(bold(__('writing additional pages...')), nonl=True)
-
+    @progress_message(__('writing additional pages'))
+    def gen_additional_pages(self) -> None:
         # additional pages from conf.py
         for pagename, template in self.config.html_additional_pages.items():
             logger.info(' ' + pagename, nonl=True)
@@ -643,8 +640,6 @@ class StandaloneHTMLBuilder(Builder):
             fn = path.join(self.outdir, '_static', 'opensearch.xml')
             self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)
 
-        logger.info('')
-
     def write_genindex(self) -> None:
         # the total count of lines for each index letter, used to distribute
         # the entries into two columns
@@ -720,63 +715,60 @@ class StandaloneHTMLBuilder(Builder):
     def copy_static_files(self) -> None:
         try:
-            # copy static files
-            logger.info(bold(__('copying static files... ')), nonl=True)
-            ensuredir(path.join(self.outdir, '_static'))
-            # first, create pygments style file
-            with open(path.join(self.outdir, '_static', 'pygments.css'), 'w') as f:
-                f.write(self.highlighter.get_stylesheet())
-            # then, copy translations JavaScript file
-            if self.config.language is not None:
-                jsfile = self._get_translations_js()
-                if jsfile:
-                    copyfile(jsfile, path.join(self.outdir, '_static',
-                                               'translations.js'))
+            with progress_message(__('copying static files... ')):
+                ensuredir(path.join(self.outdir, '_static'))
+                # first, create pygments style file
+                with open(path.join(self.outdir, '_static', 'pygments.css'), 'w') as f:
+                    f.write(self.highlighter.get_stylesheet())
+                # then, copy translations JavaScript file
+                if self.config.language is not None:
+                    jsfile = self._get_translations_js()
+                    if jsfile:
+                        copyfile(jsfile, path.join(self.outdir, '_static',
+                                                   'translations.js'))
 
-            # copy non-minified stemmer JavaScript file
-            if self.indexer is not None:
-                jsfile = self.indexer.get_js_stemmer_rawcode()
-                if jsfile:
-                    copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))
+                # copy non-minified stemmer JavaScript file
+                if self.indexer is not None:
+                    jsfile = self.indexer.get_js_stemmer_rawcode()
+                    if jsfile:
+                        copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))
 
-            ctx = self.globalcontext.copy()
+                ctx = self.globalcontext.copy()
 
-            # add context items for search function used in searchtools.js_t
-            if self.indexer is not None:
-                ctx.update(self.indexer.context_for_searchtool())
+                # add context items for search function used in searchtools.js_t
+                if self.indexer is not None:
+                    ctx.update(self.indexer.context_for_searchtool())
 
-            # then, copy over theme-supplied static files
-            if self.theme:
-                for theme_path in self.theme.get_theme_dirs()[::-1]:
-                    entry = path.join(theme_path, 'static')
-                    copy_asset(entry, path.join(self.outdir, '_static'), excluded=DOTFILES,
-                               context=ctx, renderer=self.templates)
-            # then, copy over all user-supplied static files
-            excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
-            for static_path in self.config.html_static_path:
-                entry = path.join(self.confdir, static_path)
-                copy_asset(entry, path.join(self.outdir, '_static'), excluded,
-                           context=ctx, renderer=self.templates)
+                # then, copy over theme-supplied static files
+                if self.theme:
+                    for theme_path in self.theme.get_theme_dirs()[::-1]:
+                        entry = path.join(theme_path, 'static')
+                        copy_asset(entry, path.join(self.outdir, '_static'), excluded=DOTFILES,
+                                   context=ctx, renderer=self.templates)
+                # then, copy over all user-supplied static files
+                excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
+                for static_path in self.config.html_static_path:
+                    entry = path.join(self.confdir, static_path)
+                    copy_asset(entry, path.join(self.outdir, '_static'), excluded,
+                               context=ctx, renderer=self.templates)
-            # copy logo and favicon files if not already in static path
-            if self.config.html_logo:
-                entry = path.join(self.confdir, self.config.html_logo)
-                copy_asset(entry, path.join(self.outdir, '_static'))
-            if self.config.html_favicon:
-                entry = path.join(self.confdir, self.config.html_favicon)
-                copy_asset(entry, path.join(self.outdir, '_static'))
-            logger.info(__('done'))
+                # copy logo and favicon files if not already in static path
+                if self.config.html_logo:
+                    entry = path.join(self.confdir, self.config.html_logo)
+                    copy_asset(entry, path.join(self.outdir, '_static'))
+                if self.config.html_favicon:
+                    entry = path.join(self.confdir, self.config.html_favicon)
+                    copy_asset(entry, path.join(self.outdir, '_static'))
         except OSError as err:
             logger.warning(__('cannot copy static file %r'), err)
 
     def copy_extra_files(self) -> None:
         """copy html_extra_path files."""
         try:
-            # copy html_extra_path files
-            logger.info(bold(__('copying extra files... ')), nonl=True)
-            excluded = Matcher(self.config.exclude_patterns)
-
-            for extra_path in self.config.html_extra_path:
-                entry = path.join(self.confdir, extra_path)
-                copy_asset(entry, self.outdir, excluded)
-            logger.info(__('done'))
+            with progress_message(__('copying extra files')):
+                excluded = Matcher(self.config.exclude_patterns)
+                for extra_path in self.config.html_extra_path:
+                    entry = path.join(self.confdir, extra_path)
+                    copy_asset(entry, self.outdir, excluded)
         except OSError as err:
             logger.warning(__('cannot copy extra file %r'), err)
@@ -1011,27 +1003,23 @@ class StandaloneHTMLBuilder(Builder):
         self.finish_tasks.add_task(self.dump_search_index)
         self.finish_tasks.add_task(self.dump_inventory)
 
+    @progress_message(__('dumping object inventory'))
     def dump_inventory(self) -> None:
-        logger.info(bold(__('dumping object inventory... ')), nonl=True)
         InventoryFile.dump(path.join(self.outdir, INVENTORY_FILENAME), self.env, self)
-        logger.info(__('done'))
 
     def dump_search_index(self) -> None:
-        logger.info(
-            bold(__('dumping search index in %s ... ') % self.indexer.label()),
-            nonl=True)
-        self.indexer.prune(self.env.all_docs)
-        searchindexfn = path.join(self.outdir, self.searchindex_filename)
-        # first write to a temporary file, so that if dumping fails,
-        # the existing index won't be overwritten
-        if self.indexer_dumps_unicode:
-            with open(searchindexfn + '.tmp', 'w', encoding='utf-8') as ft:
-                self.indexer.dump(ft, self.indexer_format)
-        else:
-            with open(searchindexfn + '.tmp', 'wb') as fb:
-                self.indexer.dump(fb, self.indexer_format)
-        movefile(searchindexfn + '.tmp', searchindexfn)
-        logger.info(__('done'))
+        with progress_message(__('dumping search index in %s') % self.indexer.label()):
+            self.indexer.prune(self.env.all_docs)
+            searchindexfn = path.join(self.outdir, self.searchindex_filename)
+            # first write to a temporary file, so that if dumping fails,
+            # the existing index won't be overwritten
+            if self.indexer_dumps_unicode:
+                with open(searchindexfn + '.tmp', 'w', encoding='utf-8') as ft:
+                    self.indexer.dump(ft, self.indexer_format)
+            else:
+                with open(searchindexfn + '.tmp', 'wb') as fb:
+                    self.indexer.dump(fb, self.indexer_format)
+            movefile(searchindexfn + '.tmp', searchindexfn)
 
 
 def convert_html_css_files(app: Sphinx, config: Config) -> None:
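The builder refactor above replaces hand-rolled ``logger.info(bold(...)) ... logger.info('done')`` pairs with ``progress_message``, used both as a decorator (``gen_indices``, ``dump_inventory``) and as a context manager (``copy_static_files``, ``dump_search_index``). A condensed sketch of the idea, not Sphinx's actual implementation (the real helper lives in ``sphinx.util`` and reports through Sphinx's logging):

    from contextlib import contextmanager

    @contextmanager
    def progress_message(message):
        # announce the task, then report success or failure on the same line
        print(message + '... ', end='', flush=True)
        try:
            yield
        except BaseException:
            print('failed')
            raise
        else:
            print('done')

    # generator-based context managers are recreated per call, so the same
    # object also works as a function decorator:
    @progress_message('generating indices')
    def gen_indices():
        ...  # write the indices

    with progress_message('copying extra files'):
        ...  # copy the files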
sphinx/cmd/build.py
@@ -14,6 +14,7 @@ import multiprocessing
 import os
 import sys
 import traceback
+from typing import Any, IO, List
 
 from docutils.utils import SystemMessage
 
@@ -26,13 +27,8 @@ from sphinx.util import Tee, format_exception_cut_frames, save_traceback
 from sphinx.util.console import red, nocolor, color_terminal, terminal_safe  # type: ignore
 from sphinx.util.docutils import docutils_namespace, patch_docutils
 
-if False:
-    # For type annotation
-    from typing import Any, IO, List, Union  # NOQA
-
 
-def handle_exception(app, args, exception, stderr=sys.stderr):
-    # type: (Sphinx, Any, Union[Exception, KeyboardInterrupt], IO) -> None
+def handle_exception(app: Sphinx, args: Any, exception: BaseException, stderr: IO = sys.stderr) -> None:  # NOQA
     if args.pdb:
         import pdb
         print(red(__('Exception occurred while building, starting debugger:')),
@@ -82,8 +78,7 @@ def handle_exception(app, args, exception, stderr=sys.stderr):
           file=stderr)
 
 
-def jobs_argument(value):
-    # type: (str) -> int
+def jobs_argument(value: str) -> int:
     """
     Special type to handle 'auto' flags passed to 'sphinx-build' via -j flag. Can
    be expanded to handle other special scaling requests, such as setting job count
@@ -99,8 +94,7 @@ def jobs_argument(value):
     return jobs
 
 
-def get_parser():
-    # type: () -> argparse.ArgumentParser
+def get_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser(
         usage='%(prog)s [OPTIONS] SOURCEDIR OUTPUTDIR [FILENAMES...]',
         epilog=__('For more information, visit <http://sphinx-doc.org/>.'),
@@ -195,15 +189,13 @@ files can be built by specifying individual filenames.
     return parser
 
 
-def make_main(argv=sys.argv[1:]):
-    # type: (List[str]) -> int
+def make_main(argv: List[str] = sys.argv[1:]) -> int:
     """Sphinx build "make mode" entry."""
     from sphinx.cmd import make_mode
     return make_mode.run_make_mode(argv[1:])
 
 
-def build_main(argv=sys.argv[1:]):
-    # type: (List[str]) -> int
+def build_main(argv: List[str] = sys.argv[1:]) -> int:
     """Sphinx build "main" command-line entry."""
 
     parser = get_parser()
@@ -288,8 +280,7 @@ def build_main(argv=sys.argv[1:]):
         return 2
 
 
-def main(argv=sys.argv[1:]):
-    # type: (List[str]) -> int
+def main(argv: List[str] = sys.argv[1:]) -> int:
     sphinx.locale.setlocale(locale.LC_ALL, '')
     sphinx.locale.init_console(os.path.join(package_dir, 'locale'), 'sphinx')
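The changes to sphinx/cmd/build.py above follow one mechanical pattern that repeats through the rest of this commit: the Python 2 era ``if False`` typing-import guard and comment annotations give way to a plain ``typing`` import and inline annotations. Schematically (``frobnicate`` is a placeholder name, not from the source):

    # before: imports visible only to the type checker
    if False:
        # For type annotation
        from typing import List  # NOQA

    def frobnicate(items=None):
        # type: (List[str]) -> int
        ...

    # after: a real import and PEP 484 inline annotations
    from typing import List

    def frobnicate(items: List[str] = None) -> int:
        ...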
sphinx/cmd/make_mode.py
@@ -18,16 +18,13 @@ import os
 import subprocess
 import sys
 from os import path
+from typing import List
 
 import sphinx
 from sphinx.cmd.build import build_main
 from sphinx.util.console import color_terminal, nocolor, bold, blue  # type: ignore
 from sphinx.util.osutil import cd, rmtree
 
-if False:
-    # For type annotation
-    from typing import List  # NOQA
-
 
 BUILDERS = [
     ("", "html", "to make standalone HTML files"),
@@ -58,20 +55,16 @@ BUILDERS = [
 
 
 class Make:
 
-    def __init__(self, srcdir, builddir, opts):
-        # type: (str, str, List[str]) -> None
+    def __init__(self, srcdir: str, builddir: str, opts: List[str]) -> None:
         self.srcdir = srcdir
         self.builddir = builddir
         self.opts = opts
         self.makecmd = os.environ.get('MAKE', 'make')  # refer $MAKE to determine make command
 
-    def builddir_join(self, *comps):
-        # type: (str) -> str
+    def builddir_join(self, *comps: str) -> str:
         return path.join(self.builddir, *comps)
 
-    def build_clean(self):
-        # type: () -> int
+    def build_clean(self) -> int:
         srcdir = path.abspath(self.srcdir)
         builddir = path.abspath(self.builddir)
         if not path.exists(self.builddir):
@@ -90,8 +83,7 @@ class Make:
             rmtree(self.builddir_join(item))
         return 0
 
-    def build_help(self):
-        # type: () -> None
+    def build_help(self) -> None:
         if not color_terminal():
             nocolor()
 
@@ -101,8 +93,7 @@ class Make:
             if not osname or os.name == osname:
                 print('  %s  %s' % (blue(bname.ljust(10)), description))
 
-    def build_latexpdf(self):
-        # type: () -> int
+    def build_latexpdf(self) -> int:
         if self.run_generic_build('latex') > 0:
             return 1
 
@@ -117,8 +108,7 @@ class Make:
             print('Error: Failed to run: %s' % makecmd)
             return 1
 
-    def build_latexpdfja(self):
-        # type: () -> int
+    def build_latexpdfja(self) -> int:
         if self.run_generic_build('latex') > 0:
             return 1
 
@@ -133,8 +123,7 @@ class Make:
             print('Error: Failed to run: %s' % makecmd)
             return 1
 
-    def build_info(self):
-        # type: () -> int
+    def build_info(self) -> int:
         if self.run_generic_build('texinfo') > 0:
             return 1
         try:
@@ -144,15 +133,13 @@ class Make:
             print('Error: Failed to run: %s' % self.makecmd)
             return 1
 
-    def build_gettext(self):
-        # type: () -> int
+    def build_gettext(self) -> int:
         dtdir = self.builddir_join('gettext', '.doctrees')
         if self.run_generic_build('gettext', doctreedir=dtdir) > 0:
             return 1
         return 0
 
-    def run_generic_build(self, builder, doctreedir=None):
-        # type: (str, str) -> int
+    def run_generic_build(self, builder: str, doctreedir: str = None) -> int:
         # compatibility with old Makefile
         papersize = os.getenv('PAPER', '')
         opts = self.opts
@@ -168,8 +155,7 @@ class Make:
         return build_main(args + opts)
 
 
-def run_make_mode(args):
-    # type: (List[str]) -> int
+def run_make_mode(args: List[str]) -> int:
     if len(args) < 3:
         print('Error: at least 3 arguments (builder, source '
               'dir, build dir) are required.', file=sys.stderr)
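For orientation, ``run_make_mode`` receives everything after the ``-M`` flag, so a make-mode invocation maps onto it directly. An illustrative call (``docs`` and ``_build`` are placeholder paths):

    from sphinx.cmd import make_mode

    # equivalent of: sphinx-build -M latexpdf docs _build
    # args[0] selects Make.build_latexpdf(); args[1:3] are source/build dirs
    make_mode.run_make_mode(['latexpdf', 'docs', '_build'])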
sphinx/cmd/quickstart.py
@@ -17,6 +17,7 @@ import time
 import warnings
 from collections import OrderedDict
 from os import path
+from typing import Any, Callable, Dict, List, Pattern, Union
 
 # try to import readline, unix specific enhancement
 try:
@@ -42,10 +43,6 @@ from sphinx.util.console import (  # type: ignore
 from sphinx.util.osutil import ensuredir
 from sphinx.util.template import SphinxRenderer
 
-if False:
-    # For type annotation
-    from typing import Any, Callable, Dict, List, Pattern, Union  # NOQA
-
 TERM_ENCODING = getattr(sys.stdin, 'encoding', None)  # RemovedInSphinx40Warning
 
 EXTENSIONS = OrderedDict([
@@ -82,8 +79,7 @@ else:
 
 
 # function to get input from terminal -- overridden by the test suite
-def term_input(prompt):
-    # type: (str) -> str
+def term_input(prompt: str) -> str:
     if sys.platform == 'win32':
         # Important: On windows, readline is not enabled by default. In these
         # environment, escape sequences have been broken. To avoid the
@@ -98,57 +94,48 @@ class ValidationError(Exception):
     """Raised for validation errors."""
 
 
-def is_path(x):
-    # type: (str) -> str
+def is_path(x: str) -> str:
     x = path.expanduser(x)
     if not path.isdir(x):
         raise ValidationError(__("Please enter a valid path name."))
     return x
 
 
-def allow_empty(x):
-    # type: (str) -> str
+def allow_empty(x: str) -> str:
     return x
 
 
-def nonempty(x):
-    # type: (str) -> str
+def nonempty(x: str) -> str:
     if not x:
         raise ValidationError(__("Please enter some text."))
     return x
 
 
-def choice(*l):
-    # type: (str) -> Callable[[str], str]
-    def val(x):
-        # type: (str) -> str
+def choice(*l: str) -> Callable[[str], str]:
+    def val(x: str) -> str:
         if x not in l:
             raise ValidationError(__('Please enter one of %s.') % ', '.join(l))
         return x
     return val
 
 
-def boolean(x):
-    # type: (str) -> bool
+def boolean(x: str) -> bool:
     if x.upper() not in ('Y', 'YES', 'N', 'NO'):
         raise ValidationError(__("Please enter either 'y' or 'n'."))
     return x.upper() in ('Y', 'YES')
 
 
-def suffix(x):
-    # type: (str) -> str
+def suffix(x: str) -> str:
     if not (x[0:1] == '.' and len(x) > 1):
         raise ValidationError(__("Please enter a file suffix, e.g. '.rst' or '.txt'."))
     return x
 
 
-def ok(x):
-    # type: (str) -> str
+def ok(x: str) -> str:
     return x
 
 
-def term_decode(text):
-    # type: (Union[bytes,str]) -> str
+def term_decode(text: Union[bytes, str]) -> str:
     warnings.warn('term_decode() is deprecated.',
                   RemovedInSphinx40Warning, stacklevel=2)
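Each validator above either returns the (possibly normalized) value or raises ``ValidationError``, which ``do_prompt`` in the next hunk catches in order to re-ask. A sketch of how such validators get wired into prompts (the prompt strings and values here are illustrative, not quoted from quickstart):

    # yes/no question validated by boolean()
    d['sep'] = do_prompt(__('Separate source and build directories (y/n)'),
                         'n', boolean)

    # choice() builds a validator closed over the allowed values
    pick = choice('.rst', '.txt')
    d['suffix'] = do_prompt(__('Source file suffix'), '.rst', pick)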
@@ -172,8 +159,7 @@ def term_decode(text):
     return text.decode('latin1')
 
 
-def do_prompt(text, default=None, validator=nonempty):
-    # type: (str, str, Callable[[str], Any]) -> Union[str, bool]
+def do_prompt(text: str, default: str = None, validator: Callable[[str], Any] = nonempty) -> Union[str, bool]:  # NOQA
     while True:
         if default is not None:
             prompt = PROMPT_PREFIX + '%s [%s]: ' % (text, default)
@@ -198,8 +184,7 @@ def do_prompt(text, default=None, validator=nonempty):
         return x
 
 
-def convert_python_source(source, rex=re.compile(r"[uU]('.*?')")):
-    # type: (str, Pattern) -> str
+def convert_python_source(source: str, rex: Pattern = re.compile(r"[uU]('.*?')")) -> str:
     # remove Unicode literal prefixes
     warnings.warn('convert_python_source() is deprecated.',
                   RemovedInSphinx40Warning)
@@ -207,13 +192,11 @@ def convert_python_source(source, rex=re.compile(r"[uU]('.*?')")):
 
 
 class QuickstartRenderer(SphinxRenderer):
-    def __init__(self, templatedir):
-        # type: (str) -> None
+    def __init__(self, templatedir: str) -> None:
         self.templatedir = templatedir or ''
         super().__init__()
 
-    def render(self, template_name, context):
-        # type: (str, Dict) -> str
+    def render(self, template_name: str, context: Dict) -> str:
         user_template = path.join(self.templatedir, path.basename(template_name))
         if self.templatedir and path.exists(user_template):
             return self.render_from_file(user_template, context)
@@ -221,8 +204,7 @@ class QuickstartRenderer(SphinxRenderer):
         return super().render(template_name, context)
 
 
-def ask_user(d):
-    # type: (Dict) -> None
+def ask_user(d: Dict) -> None:
     """Ask the user for quickstart values missing from *d*.
 
     Values are:
@@ -360,8 +342,8 @@ def ask_user(d):
     print()
 
 
-def generate(d, overwrite=True, silent=False, templatedir=None):
-    # type: (Dict, bool, bool, str) -> None
+def generate(d: Dict, overwrite: bool = True, silent: bool = False, templatedir: str = None
+             ) -> None:
     """Generate project based on values in *d*."""
     template = QuickstartRenderer(templatedir=templatedir)
 
@@ -394,8 +376,7 @@ def generate(d, overwrite=True, silent=False, templatedir=None):
     ensuredir(path.join(srcdir, d['dot'] + 'templates'))
     ensuredir(path.join(srcdir, d['dot'] + 'static'))
 
-    def write_file(fpath, content, newline=None):
-        # type: (str, str, str) -> None
+    def write_file(fpath: str, content: str, newline: str = None) -> None:
         if overwrite or not path.isfile(fpath):
             if 'quiet' not in d:
                 print(__('Creating file %s.') % fpath)
@@ -454,8 +435,7 @@ def generate(d, overwrite=True, silent=False, templatedir=None):
     print()
 
 
-def valid_dir(d):
-    # type: (Dict) -> bool
+def valid_dir(d: Dict) -> bool:
     dir = d['path']
     if not path.exists(dir):
         return True
@@ -484,8 +464,7 @@ def valid_dir(d):
     return True
 
 
-def get_parser():
-    # type: () -> argparse.ArgumentParser
+def get_parser() -> argparse.ArgumentParser:
     description = __(
         "\n"
         "Generate required files for a Sphinx project.\n"
@@ -568,8 +547,7 @@ def get_parser():
     return parser
 
 
-def main(argv=sys.argv[1:]):
-    # type: (List[str]) -> int
+def main(argv: List[str] = sys.argv[1:]) -> int:
     sphinx.locale.setlocale(locale.LC_ALL, '')
     sphinx.locale.init_console(os.path.join(package_dir, 'locale'), 'sphinx')
sphinx/domains/c.py
@@ -11,6 +11,7 @@
 import re
 import string
 from typing import Any, Dict, Iterator, List, Tuple
+from typing import cast
 
 from docutils import nodes
 from docutils.nodes import Element
@@ -22,12 +23,15 @@ from sphinx.builders import Builder
 from sphinx.directives import ObjectDescription
 from sphinx.domains import Domain, ObjType
 from sphinx.environment import BuildEnvironment
-from sphinx.locale import _
+from sphinx.locale import _, __
 from sphinx.roles import XRefRole
+from sphinx.util import logging
 from sphinx.util.docfields import Field, TypedField
 from sphinx.util.nodes import make_refnode
 
 
+logger = logging.getLogger(__name__)
+
 # RE to split at word boundaries
 wsplit_re = re.compile(r'(\W+)')
 
@@ -201,13 +205,9 @@ class CObject(ObjectDescription):
         signode['ids'].append(targetname)
         signode['first'] = (not self.names)
         self.state.document.note_explicit_target(signode)
-        inv = self.env.domaindata['c']['objects']
-        if name in inv:
-            self.state_machine.reporter.warning(
-                'duplicate C object description of %s, ' % name +
-                'other instance in ' + self.env.doc2path(inv[name][0]),
-                line=self.lineno)
-        inv[name] = (self.env.docname, self.objtype)
+
+        domain = cast(CDomain, self.env.get_domain('c'))
+        domain.note_object(name, self.objtype)
 
         indextext = self.get_index_text(name)
         if indextext:
@@ -271,10 +271,22 @@ class CDomain(Domain):
         'objects': {},  # fullname -> docname, objtype
     }  # type: Dict[str, Dict[str, Tuple[str, Any]]]
 
+    @property
+    def objects(self) -> Dict[str, Tuple[str, str]]:
+        return self.data.setdefault('objects', {})  # fullname -> docname, objtype
+
+    def note_object(self, name: str, objtype: str, location: Any = None) -> None:
+        if name in self.objects:
+            docname = self.objects[name][0]
+            logger.warning(__('duplicate C object description of %s, '
+                              'other instance in %s, use :noindex: for one of them'),
+                           name, docname, location=location)
+        self.objects[name] = (self.env.docname, objtype)
+
     def clear_doc(self, docname: str) -> None:
-        for fullname, (fn, _l) in list(self.data['objects'].items()):
+        for fullname, (fn, _l) in list(self.objects.items()):
             if fn == docname:
-                del self.data['objects'][fullname]
+                del self.objects[fullname]
 
     def merge_domaindata(self, docnames: List[str], otherdata: Dict) -> None:
         # XXX check duplicates
@@ -290,9 +302,9 @@ class CDomain(Domain):
         # because TypedField can generate xrefs
         if target in CObject.stopwords:
             return contnode
-        if target not in self.data['objects']:
+        if target not in self.objects:
             return None
-        obj = self.data['objects'][target]
+        obj = self.objects[target]
         return make_refnode(builder, fromdocname, obj[0], 'c.' + target,
                             contnode, target)
 
@@ -301,15 +313,15 @@ class CDomain(Domain):
                          ) -> List[Tuple[str, Element]]:
         # strip pointer asterisk
         target = target.rstrip(' *')
-        if target not in self.data['objects']:
+        if target not in self.objects:
             return []
-        obj = self.data['objects'][target]
+        obj = self.objects[target]
         return [('c:' + self.role_for_objtype(obj[1]),
                  make_refnode(builder, fromdocname, obj[0], 'c.' + target,
                               contnode, target))]
 
     def get_objects(self) -> Iterator[Tuple[str, str, str, str, str, int]]:
-        for refname, (docname, type) in list(self.data['objects'].items()):
+        for refname, (docname, type) in list(self.objects.items()):
             yield (refname, refname, type, docname, 'c.' + refname, 1)
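A practical effect of routing every access through ``self.data.setdefault(...)`` rather than plain indexing: the bucket is created on demand, for example when ``domain.data`` was restored from an environment pickle that lacks the key. The pattern in isolation (a toy class, not the Sphinx API):

    class DomainLike:
        def __init__(self, data=None):
            self.data = data or {}        # e.g. restored from a pickle

        @property
        def objects(self):
            # create the bucket on first access instead of assuming it exists
            return self.data.setdefault('objects', {})

    d = DomainLike()
    d.objects['c.parse'] = ('api', 'function')
    assert d.data['objects'] == {'c.parse': ('api', 'function')}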
sphinx/domains/changeset.py
@@ -108,22 +108,9 @@ class ChangeSetDomain(Domain):
         'changes': {},  # version -> list of ChangeSet
     }  # type: Dict
 
-    def clear_doc(self, docname: str) -> None:
-        for version, changes in self.data['changes'].items():
-            for changeset in changes[:]:
-                if changeset.docname == docname:
-                    changes.remove(changeset)
-
-    def merge_domaindata(self, docnames: List[str], otherdata: Dict) -> None:
-        # XXX duplicates?
-        for version, otherchanges in otherdata['changes'].items():
-            changes = self.data['changes'].setdefault(version, [])
-            for changeset in otherchanges:
-                if changeset.docname in docnames:
-                    changes.append(changeset)
-
-    def process_doc(self, env: "BuildEnvironment", docname: str, document: nodes.document) -> None:  # NOQA
-        pass  # nothing to do here. All changesets are registered on calling directive.
+    @property
+    def changesets(self) -> Dict[str, List[ChangeSet]]:
+        return self.data.setdefault('changes', {})  # version -> list of ChangeSet
 
     def note_changeset(self, node: addnodes.versionmodified) -> None:
         version = node['version']
@@ -131,10 +118,27 @@ class ChangeSetDomain(Domain):
         objname = self.env.temp_data.get('object')
         changeset = ChangeSet(node['type'], self.env.docname, node.line,
                               module, objname, node.astext())
-        self.data['changes'].setdefault(version, []).append(changeset)
+        self.changesets.setdefault(version, []).append(changeset)
 
+    def clear_doc(self, docname: str) -> None:
+        for version, changes in self.changesets.items():
+            for changeset in changes[:]:
+                if changeset.docname == docname:
+                    changes.remove(changeset)
+
+    def merge_domaindata(self, docnames: List[str], otherdata: Dict) -> None:
+        # XXX duplicates?
+        for version, otherchanges in otherdata['changes'].items():
+            changes = self.changesets.setdefault(version, [])
+            for changeset in otherchanges:
+                if changeset.docname in docnames:
+                    changes.append(changeset)
+
+    def process_doc(self, env: "BuildEnvironment", docname: str, document: nodes.document) -> None:  # NOQA
+        pass  # nothing to do here. All changesets are registered on calling directive.
+
     def get_changesets_for(self, version: str) -> List[ChangeSet]:
-        return self.data['changes'].get(version, [])
+        return self.changesets.get(version, [])
 
 
 def setup(app: "Sphinx") -> Dict[str, Any]:
sphinx/domains/javascript.py
@@ -9,6 +9,7 @@
 """
 
 from typing import Any, Dict, Iterator, List, Tuple
+from typing import cast
 
 from docutils import nodes
 from docutils.nodes import Element, Node
@@ -22,13 +23,17 @@ from sphinx.directives import ObjectDescription
 from sphinx.domains import Domain, ObjType
 from sphinx.domains.python import _pseudo_parse_arglist
 from sphinx.environment import BuildEnvironment
-from sphinx.locale import _
+from sphinx.locale import _, __
 from sphinx.roles import XRefRole
+from sphinx.util import logging
 from sphinx.util.docfields import Field, GroupedField, TypedField
 from sphinx.util.docutils import SphinxDirective
 from sphinx.util.nodes import make_refnode
 
 
+logger = logging.getLogger(__name__)
+
+
 class JSObject(ObjectDescription):
     """
     Description of a JavaScript object.
@@ -106,14 +111,10 @@ class JSObject(ObjectDescription):
         signode['ids'].append(fullname.replace('$', '_S_'))
         signode['first'] = not self.names
         self.state.document.note_explicit_target(signode)
-        objects = self.env.domaindata['js']['objects']
-        if fullname in objects:
-            self.state_machine.reporter.warning(
-                'duplicate object description of %s, ' % fullname +
-                'other instance in ' +
-                self.env.doc2path(objects[fullname][0]),
-                line=self.lineno)
-        objects[fullname] = self.env.docname, self.objtype
+
+        domain = cast(JavaScriptDomain, self.env.get_domain('js'))
+        domain.note_object(fullname, self.objtype,
+                           location=(self.env.docname, self.lineno))
 
         indextext = self.get_index_text(mod_name, name_obj)
         if indextext:
@@ -248,10 +249,13 @@ class JSModule(SphinxDirective):
         noindex = 'noindex' in self.options
         ret = []  # type: List[Node]
         if not noindex:
-            self.env.domaindata['js']['modules'][mod_name] = self.env.docname
+            domain = cast(JavaScriptDomain, self.env.get_domain('js'))
+
+            domain.note_module(mod_name)
             # Make a duplicate entry in 'objects' to facilitate searching for
             # the module in JavaScriptDomain.find_obj()
-            self.env.domaindata['js']['objects'][mod_name] = (self.env.docname, 'module')
+            domain.note_object(mod_name, 'module', location=(self.env.docname, self.lineno))
 
             targetnode = nodes.target('', '', ids=['module-' + mod_name],
                                       ismod=True)
             self.state.document.note_explicit_target(targetnode)
@@ -314,31 +318,48 @@ class JavaScriptDomain(Domain):
     }
     initial_data = {
         'objects': {},  # fullname -> docname, objtype
-        'modules': {},  # mod_name -> docname
+        'modules': {},  # modname -> docname
     }  # type: Dict[str, Dict[str, Tuple[str, str]]]
 
+    @property
+    def objects(self) -> Dict[str, Tuple[str, str]]:
+        return self.data.setdefault('objects', {})  # fullname -> docname, objtype
+
+    def note_object(self, fullname: str, objtype: str, location: Any = None) -> None:
+        if fullname in self.objects:
+            docname = self.objects[fullname][0]
+            logger.warning(__('duplicate object description of %s, other instance in %s'),
+                           fullname, docname, location=location)
+        self.objects[fullname] = (self.env.docname, objtype)
+
+    @property
+    def modules(self) -> Dict[str, str]:
+        return self.data.setdefault('modules', {})  # modname -> docname
+
+    def note_module(self, modname: str) -> None:
+        self.modules[modname] = self.env.docname
+
     def clear_doc(self, docname: str) -> None:
-        for fullname, (pkg_docname, _l) in list(self.data['objects'].items()):
+        for fullname, (pkg_docname, _l) in list(self.objects.items()):
             if pkg_docname == docname:
-                del self.data['objects'][fullname]
-        for mod_name, pkg_docname in list(self.data['modules'].items()):
+                del self.objects[fullname]
+        for modname, pkg_docname in list(self.modules.items()):
             if pkg_docname == docname:
-                del self.data['modules'][mod_name]
+                del self.modules[modname]
 
     def merge_domaindata(self, docnames: List[str], otherdata: Dict) -> None:
         # XXX check duplicates
         for fullname, (fn, objtype) in otherdata['objects'].items():
             if fn in docnames:
-                self.data['objects'][fullname] = (fn, objtype)
+                self.objects[fullname] = (fn, objtype)
         for mod_name, pkg_docname in otherdata['modules'].items():
             if pkg_docname in docnames:
-                self.data['modules'][mod_name] = pkg_docname
+                self.modules[mod_name] = pkg_docname
 
     def find_obj(self, env: BuildEnvironment, mod_name: str, prefix: str, name: str,
                  typ: str, searchorder: int = 0) -> Tuple[str, Tuple[str, str]]:
         if name[-2:] == '()':
             name = name[:-2]
-        objects = self.data['objects']
 
         searches = []
         if mod_name and prefix:
@@ -354,10 +375,10 @@ class JavaScriptDomain(Domain):
 
         newname = None
         for search_name in searches:
-            if search_name in objects:
+            if search_name in self.objects:
                 newname = search_name
 
-        return newname, objects.get(newname)
+        return newname, self.objects.get(newname)
 
     def resolve_xref(self, env: BuildEnvironment, fromdocname: str, builder: Builder,
                      typ: str, target: str, node: pending_xref, contnode: Element
@@ -384,9 +405,8 @@ class JavaScriptDomain(Domain):
                                   name.replace('$', '_S_'), contnode, name))]
 
     def get_objects(self) -> Iterator[Tuple[str, str, str, str, str, int]]:
-        for refname, (docname, type) in list(self.data['objects'].items()):
-            yield refname, refname, type, docname, \
-                refname.replace('$', '_S_'), 1
+        for refname, (docname, type) in list(self.objects.items()):
+            yield refname, refname, type, docname, refname.replace('$', '_S_'), 1
 
     def get_full_qualified_name(self, node: Element) -> str:
         modname = node.get('js:module')
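Net effect of the ``note_module``/``note_object`` pair for a ``.. js:module:: mymod`` directive (the module name is illustrative), per the code above:

    domain = env.get_domain('js')
    domain.note_module('mymod')
    domain.note_object('mymod', 'module', location=None)
    # afterwards:
    #   domain.modules == {'mymod': env.docname}
    #   domain.objects == {'mymod': (env.docname, 'module')}
    # the duplicate 'objects' entry is what lets find_obj() resolve the module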
sphinx/environment/__init__.py
@@ -234,6 +234,8 @@ class BuildEnvironment:
             self.config_status = CONFIG_OK
             if self.config is None:
                 self.config_status = CONFIG_NEW
+            elif self.config.extensions != config.extensions:
+                self.config_status = CONFIG_EXTENSIONS_CHANGED
             else:
                 # check if a config value was changed that affects how
                 # doctrees are read
@@ -242,11 +244,6 @@ class BuildEnvironment:
                         self.config_status = CONFIG_CHANGED
                         break
 
-            # this value is not covered by the above loop because it is handled
-            # specially by the config class
-            if self.config.extensions != config.extensions:
-                self.config_status = CONFIG_EXTENSIONS_CHANGED
-
         self.config = config
 
     def _update_settings(self, config):
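The reordering above classifies a changed extension list before (and instead of) the generic value comparison, which lines up with bug #6531 in the changelog above. The resulting decision ladder, condensed into a standalone sketch (the constants' values and the function signature are illustrative, not the BuildEnvironment API):

    CONFIG_NEW, CONFIG_OK, CONFIG_CHANGED, CONFIG_EXTENSIONS_CHANGED = range(4)

    def classify(old_config, new_config, other_values_differ):
        if old_config is None:
            return CONFIG_NEW                     # first build, nothing saved
        elif old_config.extensions != new_config.extensions:
            return CONFIG_EXTENSIONS_CHANGED      # checked first now
        elif other_values_differ:
            return CONFIG_CHANGED
        else:
            return CONFIG_OK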
sphinx/texinputs/sphinx.sty
@@ -413,6 +413,18 @@
 \newcommand\sphinxsetup[1]{\setkeys{sphinx}{#1}}
 
 
+%% ALPHANUMERIC LIST ITEMS
+\newcommand\sphinxsetlistlabels[5]
+{% #1 = style, #2 = enum, #3 = enumnext, #4 = prefix, #5 = suffix
+ % #2 and #3 are counters used by enumerate environment e.g. enumi, enumii.
+ % #1 is a macro such as \arabic or \alph
+ % prefix and suffix are strings (by default empty and a dot).
+ \@namedef{the#2}{#1{#2}}%
+ \@namedef{label#2}{#4\@nameuse{the#2}#5}%
+ \@namedef{p@#3}{\@nameuse{p@#2}#4\@nameuse{the#2}#5}%
+}%
+
+
 %% MAXLISTDEPTH
 %
 % remove LaTeX's cap on nesting depth if 'maxlistdepth' key used.
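Reading the macro: for ``\sphinxsetlistlabels{\arabic}{enumi}{enumii}{}{.}`` (the first call exercised by the test file below), the three ``\@namedef`` lines effectively define:

    \def\theenumi{\arabic{enumi}}        % counter representation
    \def\labelenumi{\theenumi.}          % item label: prefix + counter + suffix
    \def\p@enumii{\p@enumi\theenumi.}    % reference prefix for the next level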
sphinx/writers/latex.py
@@ -1335,11 +1335,8 @@ class LaTeXTranslator(SphinxTranslator):
         suffix = node.get('suffix', '.')
 
         self.body.append('\\begin{enumerate}\n')
-        self.body.append('\\def\\the%s{%s{%s}}\n' % (enum, style, enum))
-        self.body.append('\\def\\label%s{%s\\the%s %s}\n' %
-                         (enum, prefix, enum, suffix))
-        self.body.append('\\makeatletter\\def\\p@%s{\\p@%s %s\\the%s %s}\\makeatother\n' %
-                         (enumnext, enum, prefix, enum, suffix))
+        self.body.append('\\sphinxsetlistlabels{%s}{%s}{%s}{%s}{%s}%%\n' %
+                         (style, enum, enumnext, prefix, suffix))
         if 'start' in node:
             self.body.append('\\setcounter{%s}{%d}\n' % (enum, node['start'] - 1))
         if self.table:
tests/test_build_latex.py
@@ -1292,25 +1292,15 @@ def test_latex_nested_enumerated_list(app, status, warning):
    app.builder.build_all()
 
     result = (app.outdir / 'python.tex').text(encoding='utf8')
-    assert ('\\def\\theenumi{\\arabic{enumi}}\n'
-            '\\def\\labelenumi{\\theenumi .}\n'
-            '\\makeatletter\\def\\p@enumii{\\p@enumi \\theenumi .}\\makeatother\n'
+    assert ('\\sphinxsetlistlabels{\\arabic}{enumi}{enumii}{}{.}%\n'
             '\\setcounter{enumi}{4}\n' in result)
-    assert ('\\def\\theenumii{\\alph{enumii}}\n'
-            '\\def\\labelenumii{\\theenumii .}\n'
-            '\\makeatletter\\def\\p@enumiii{\\p@enumii \\theenumii .}\\makeatother\n'
+    assert ('\\sphinxsetlistlabels{\\alph}{enumii}{enumiii}{}{.}%\n'
             '\\setcounter{enumii}{3}\n' in result)
-    assert ('\\def\\theenumiii{\\arabic{enumiii}}\n'
-            '\\def\\labelenumiii{\\theenumiii )}\n'
-            '\\makeatletter\\def\\p@enumiv{\\p@enumiii \\theenumiii )}\\makeatother\n'
+    assert ('\\sphinxsetlistlabels{\\arabic}{enumiii}{enumiv}{}{)}%\n'
             '\\setcounter{enumiii}{9}\n' in result)
-    assert ('\\def\\theenumiv{\\arabic{enumiv}}\n'
-            '\\def\\labelenumiv{(\\theenumiv )}\n'
-            '\\makeatletter\\def\\p@enumv{\\p@enumiv (\\theenumiv )}\\makeatother\n'
+    assert ('\\sphinxsetlistlabels{\\arabic}{enumiv}{enumv}{(}{)}%\n'
             '\\setcounter{enumiv}{23}\n' in result)
-    assert ('\\def\\theenumii{\\roman{enumii}}\n'
-            '\\def\\labelenumii{\\theenumii .}\n'
-            '\\makeatletter\\def\\p@enumiii{\\p@enumii \\theenumii .}\\makeatother\n'
+    assert ('\\sphinxsetlistlabels{\\roman}{enumii}{enumiii}{}{.}%\n'
             '\\setcounter{enumii}{2}\n' in result)
tests/test_environment.py
@@ -11,9 +11,32 @@ import pytest
 
 from sphinx.builders.html import StandaloneHTMLBuilder
 from sphinx.builders.latex import LaTeXBuilder
+from sphinx.environment import CONFIG_OK, CONFIG_CHANGED, CONFIG_EXTENSIONS_CHANGED, CONFIG_NEW
 from sphinx.testing.comparer import PathComparer
 
 
+@pytest.mark.sphinx('dummy', testroot='basic')
+def test_config_status(make_app, app_params):
+    args, kwargs = app_params
+
+    # clean build
+    app1 = make_app(*args, freshenv=True, **kwargs)
+    assert app1.env.config_status == CONFIG_NEW
+    app1.build()
+
+    # incremental build (no config changed)
+    app2 = make_app(*args, **kwargs)
+    assert app2.env.config_status == CONFIG_OK
+
+    # incremental build (config entry changed)
+    app3 = make_app(*args, confoverrides={'master_doc': 'content'}, **kwargs)
+    assert app3.env.config_status == CONFIG_CHANGED
+
+    # incremental build (extension changed)
+    app4 = make_app(*args, confoverrides={'extensions': ['sphinx.ext.autodoc']}, **kwargs)
+    assert app4.env.config_status == CONFIG_EXTENSIONS_CHANGED
+
+
 @pytest.mark.sphinx('dummy')
 def test_images(app):
     app.build()