Mirror of https://github.com/sphinx-doc/sphinx.git, synced 2025-02-25 18:55:22 -06:00
Minor PEP8 cleanup.
This commit is contained in:
parent c5cc2a8cf9
commit df53ece2f7
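
The hunks below are purely mechanical PEP8 fixes: comment spacing, one import and one statement per line, blank lines between definitions, and re-wrapped continuation lines. As a hedged, illustrative sketch of those rules (the names lineend and do_something are invented for this example and do not appear in the diff):

# E401: one import per line instead of "import sys, os, time, re".
import os
import re
import sys
import time

# E265: block comments start with "# ", not "#comment".
lineend = -1

# E701: no "if cond: statement" on one line; the body gets its own line.
if lineend == -1:
    lineend = 0

# E301/E302: one blank line between methods, two between top-level
# definitions; several hunks below only add blank lines for this.


def do_something(path):
    """Toy helper so the blank-line rules have something to separate."""
    return os.path.basename(path)


print(do_something(sys.executable), re.escape('.'), int(time.time()))
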
@@ -25,6 +25,7 @@ class desc(nodes.Admonition, nodes.Element):
     contains one or more ``desc_signature`` and a ``desc_content``.
     """
 
+
 class desc_signature(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for object signatures.
 
@@ -39,33 +40,42 @@ class desc_addname(nodes.Part, nodes.Inline, nodes.TextElement):
 # compatibility alias
 desc_classname = desc_addname
 
+
 class desc_type(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for return types or object type names."""
 
+
 class desc_returns(desc_type):
     """Node for a "returns" annotation (a la -> in Python)."""
     def astext(self):
         return ' -> ' + nodes.TextElement.astext(self)
 
+
 class desc_name(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for the main object name."""
 
+
 class desc_parameterlist(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for a general parameter list."""
     child_text_separator = ', '
 
+
 class desc_parameter(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for a single parameter."""
 
+
 class desc_optional(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for marking optional parts of the parameter list."""
     child_text_separator = ', '
+
     def astext(self):
         return '[' + nodes.TextElement.astext(self) + ']'
 
+
 class desc_annotation(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for signature annotations (not Python 3-style annotations)."""
 
+
 class desc_content(nodes.General, nodes.Element):
     """Node for object description content.
 
@@ -82,15 +92,18 @@ class versionmodified(nodes.Admonition, nodes.TextElement):
     directives.
     """
 
+
 class seealso(nodes.Admonition, nodes.Element):
     """Custom "see also" admonition."""
 
+
 class productionlist(nodes.Admonition, nodes.Element):
     """Node for grammar production lists.
 
     Contains ``production`` nodes.
     """
 
+
 class production(nodes.Part, nodes.Inline, nodes.TextElement):
     """Node for a single grammar production rule."""
 
@@ -107,26 +120,33 @@ class index(nodes.Invisible, nodes.Inline, nodes.TextElement):
     *entrytype* is one of "single", "pair", "double", "triple".
     """
 
+
 class centered(nodes.Part, nodes.TextElement):
     """Deprecated."""
 
+
 class acks(nodes.Element):
     """Special node for "acks" lists."""
 
+
 class hlist(nodes.Element):
     """Node for "horizontal lists", i.e. lists that should be compressed to
     take up less vertical space.
     """
 
+
 class hlistcol(nodes.Element):
     """Node for one column in a horizontal list."""
 
+
 class compact_paragraph(nodes.paragraph):
     """Node for a compact paragraph (which never makes a <p> node)."""
 
+
 class glossary(nodes.Element):
     """Node to insert a glossary."""
 
+
 class only(nodes.Element):
     """Node for "only" directives (conditional inclusion based on tags)."""
 
@@ -136,14 +156,17 @@ class only(nodes.Element):
 class start_of_file(nodes.Element):
     """Node to mark start of a new file, used in the LaTeX builder only."""
 
+
 class highlightlang(nodes.Element):
     """Inserted to set the highlight language and line number options for
     subsequent code blocks.
     """
 
+
 class tabular_col_spec(nodes.Element):
     """Node for specifying tabular columns, used for LaTeX output."""
 
+
 class meta(nodes.Special, nodes.PreBibliographic, nodes.Element):
     """Node for meta directive -- same as docutils' standard meta node,
     but pickleable.
@@ -160,22 +183,27 @@ class pending_xref(nodes.Inline, nodes.Element):
     BuildEnvironment.resolve_references.
     """
 
+
 class download_reference(nodes.reference):
     """Node for download references, similar to pending_xref."""
 
+
 class literal_emphasis(nodes.emphasis):
     """Node that behaves like `emphasis`, but further text processors are not
     applied (e.g. smartypants for HTML output).
     """
 
+
 class literal_strong(nodes.strong):
     """Node that behaves like `strong`, but further text processors are not
     applied (e.g. smartypants for HTML output).
     """
 
+
 class abbreviation(nodes.Inline, nodes.TextElement):
     """Node for abbreviations with explanations."""
 
+
 class termsep(nodes.Structural, nodes.Element):
     """Separates two terms within a <term> node."""
 
@@ -88,7 +88,7 @@ def create_module_file(package, module, opts):
         text = format_heading(1, '%s module' % module)
     else:
         text = ''
-    #text += format_heading(2, ':mod:`%s` Module' % module)
+    # text += format_heading(2, ':mod:`%s` Module' % module)
     text += format_directive(module, package)
     write_file(makename(package, module), text, opts)
 
@@ -173,7 +173,7 @@ def shall_skip(module, opts):
     # skip if it has a "private" name and this is selected
     filename = path.basename(module)
     if filename != '__init__.py' and filename.startswith('_') and \
        not opts.includeprivate:
         return True
     return False
 
@@ -218,7 +218,7 @@ def recurse_tree(rootpath, excludes, opts):
         if is_pkg:
             # we are in a package with something to document
             if subs or len(py_files) > 1 or not \
                shall_skip(path.join(root, INITPY), opts):
                 subpackage = root[len(rootpath):].lstrip(path.sep).\
                     replace(path.sep, '.')
                 create_package_file(root, root_package, subpackage,
@@ -318,7 +318,7 @@ Note: By default this script will not overwrite already created files.""")
     (opts, args) = parser.parse_args(argv[1:])
 
     if opts.show_version:
         print('Sphinx (sphinx-apidoc) %s' % __version__)
         return 0
 
     if not args:
@@ -200,8 +200,8 @@ class Sphinx(object):
         else:
             try:
                 self.info(bold('loading pickled environment... '), nonl=True)
-                self.env = BuildEnvironment.frompickle(self.config,
-                    path.join(self.doctreedir, ENV_PICKLE_FILENAME))
+                self.env = BuildEnvironment.frompickle(
+                    self.config, path.join(self.doctreedir, ENV_PICKLE_FILENAME))
                 self.env.domains = {}
                 for domain in self.domains.keys():
                     # this can raise if the data version doesn't fit
@@ -291,7 +291,7 @@ class Sphinx(object):
         else:
             location = None
         warntext = location and '%s: %s%s\n' % (location, prefix, message) or \
                    '%s%s\n' % (prefix, message)
         if self.warningiserror:
             raise SphinxWarning(warntext)
         self._warncount += 1
@@ -461,7 +461,7 @@ class Sphinx(object):
         else:
             raise ExtensionError(
                 'Builder %r already exists (in module %s)' % (
                     builder.name, self.builderclasses[builder.name].__module__))
         self.builderclasses[builder.name] = builder
 
     def add_config_value(self, name, default, rebuild):
@@ -503,7 +503,8 @@ class BuildEnvironment:
         """Custom decoding error handler that warns and replaces."""
         linestart = error.object.rfind(b'\n', 0, error.start)
         lineend = error.object.find(b'\n', error.start)
-        if lineend == -1: lineend = len(error.object)
+        if lineend == -1:
+            lineend = len(error.object)
         lineno = error.object.count(b'\n', 0, error.start) + 1
         self.warn(self.docname, 'undecodable source characters, '
                   'replacing with "?": %r' %
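
The hunk above reformats a custom decoding error handler. As a rough, hedged sketch of the codecs machinery such a handler plugs into (the handler name 'warn-and-replace' and the message text are illustrative, not Sphinx's own registration):

import codecs

def warn_and_replace(error):
    """Error handler: report undecodable bytes and substitute '?'."""
    if not isinstance(error, UnicodeDecodeError):
        raise error
    print('undecodable source characters at offset %d-%d, replacing with "?"'
          % (error.start, error.end))
    # Return the replacement text and the position at which to resume decoding.
    return (u'?' * (error.end - error.start), error.end)

codecs.register_error('warn-and-replace', warn_and_replace)
print(b'caf\xe9'.decode('utf-8', 'warn-and-replace'))  # prints 'caf?'
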
@@ -10,13 +10,16 @@
 """
 from __future__ import print_function
 
-import sys, os, time, re
+import re
+import os
+import sys
+import time
 from os import path
 from io import open
 
 TERM_ENCODING = getattr(sys.stdin, 'encoding', None)
 
-#try to import readline, unix specific enhancement
+# try to import readline, unix specific enhancement
 try:
     import readline
     if readline.__doc__ and 'libedit' in readline.__doc__:
@@ -33,7 +36,7 @@ from docutils.utils import column_width
 from sphinx import __version__
 from sphinx.util.osutil import make_filename
 from sphinx.util.console import purple, bold, red, turquoise, \
     nocolor, color_terminal
 from sphinx.util import texescape
 
 # function to get input from terminal -- overridden by the test suite
@@ -972,17 +975,20 @@ def mkdir_p(dir):
 class ValidationError(Exception):
     """Raised for validation errors."""
 
+
 def is_path(x):
     x = path.expanduser(x)
     if path.exists(x) and not path.isdir(x):
         raise ValidationError("Please enter a valid path name.")
     return x
 
+
 def nonempty(x):
     if not x:
         raise ValidationError("Please enter some text.")
     return x
 
+
 def choice(*l):
     def val(x):
         if x not in l:
@@ -990,17 +996,20 @@ def choice(*l):
         return x
     return val
 
+
 def boolean(x):
     if x.upper() not in ('Y', 'YES', 'N', 'NO'):
         raise ValidationError("Please enter either 'y' or 'n'.")
     return x.upper() in ('Y', 'YES')
 
+
 def suffix(x):
     if not (x[0:1] == '.' and len(x) > 1):
         raise ValidationError("Please enter a file suffix, "
                               "e.g. '.rst' or '.txt'.")
     return x
 
+
 def ok(x):
     return x
 
@@ -1097,7 +1106,7 @@ Enter the root path for documentation.''')
     do_prompt(d, 'path', 'Root path for the documentation', '.', is_path)
 
     while path.isfile(path.join(d['path'], 'conf.py')) or \
             path.isfile(path.join(d['path'], 'source', 'conf.py')):
         print()
         print(bold('Error: an existing conf.py has been found in the '
                    'selected root path.'))
@@ -1169,7 +1178,7 @@ document is a custom template, you can also set this to another filename.''')
               'index')
 
     while path.isfile(path.join(d['path'], d['master']+d['suffix'])) or \
             path.isfile(path.join(d['path'], 'source', d['master']+d['suffix'])):
         print()
         print(bold('Error: the master file %s has already been found in the '
                    'selected root path.' % (d['master']+d['suffix'])))
@@ -1256,10 +1265,10 @@ def generate(d, overwrite=True, silent=False):
     d['extensions'] = extensions
     d['copyright'] = time.strftime('%Y') + ', ' + d['author']
     d['author_texescaped'] = text_type(d['author']).\
         translate(texescape.tex_escape_map)
     d['project_doc'] = d['project'] + ' Documentation'
     d['project_doc_texescaped'] = text_type(d['project'] + ' Documentation').\
         translate(texescape.tex_escape_map)
 
     # escape backslashes and single quotes in strings that are put into
     # a Python string literal
@@ -236,6 +236,7 @@ def indexmarkup_role(typ, rawtext, text, lineno, inliner,
 
 _amp_re = re.compile(r'(?<!&)&(?![&\s])')
 
+
 def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
     text = utils.unescape(text)
     if typ == 'menuselection':
@@ -264,6 +265,7 @@ def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
 
 _litvar_re = re.compile('{([^}]+)}')
 
+
 def emph_literal_role(typ, rawtext, text, lineno, inliner,
                       options={}, content=[]):
     text = utils.unescape(text)
@@ -282,6 +284,7 @@ def emph_literal_role(typ, rawtext, text, lineno, inliner,
 
 _abbr_re = re.compile('\((.*)\)$', re.S)
 
+
 def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
     text = utils.unescape(text)
     m = _abbr_re.search(text)
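
The three module-level regular expressions in the hunks above are easy to misread. A hedged illustration of what they match (the sample strings are invented; the patterns are copied verbatim from the diff):

import re

_amp_re = re.compile(r'(?<!&)&(?![&\s])')   # one '&' that is not doubled and not followed by whitespace
_litvar_re = re.compile('{([^}]+)}')        # a {placeholder} inside a literal
_abbr_re = re.compile('\((.*)\)$', re.S)    # a trailing '(explanation)'

print(_amp_re.search('&File').start())                # 0
print(_litvar_re.findall('python -m {module}'))       # ['module']
print(_abbr_re.search('LGPL (Lesser GPL)').group(1))  # Lesser GPL
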
@@ -30,6 +30,7 @@ from sphinx.errors import ThemeError
 NODEFAULT = object()
 THEMECONF = 'theme.conf'
 
+
 class Theme(object):
     """
     Represents the theme chosen in the configuration.
@@ -94,7 +95,8 @@ class Theme(object):
             self.themedir = tempfile.mkdtemp('sxt')
             self.themedir_created = True
             for name in tinfo.namelist():
-                if name.endswith('/'): continue
+                if name.endswith('/'):
+                    continue
                 dirname = path.dirname(name)
                 if not path.isdir(path.join(self.themedir, dirname)):
                     os.makedirs(path.join(self.themedir, dirname))
@@ -34,6 +34,7 @@ default_substitutions = set([
     'today',
 ])
 
+
 class DefaultSubstitutions(Transform):
     """
     Replace some substitutions if they aren't defined in the document.
@@ -69,9 +70,9 @@ class MoveModuleTargets(Transform):
             if not node['ids']:
                 continue
             if ('ismod' in node and
                     node.parent.__class__ is nodes.section and
                     # index 0 is the section title node
                     node.parent.index(node) == 1):
                 node.parent['ids'][0:0] = node['ids']
                 node.parent.remove(node)
 
@@ -86,10 +87,10 @@ class HandleCodeBlocks(Transform):
         # move doctest blocks out of blockquotes
         for node in self.document.traverse(nodes.block_quote):
             if all(isinstance(child, nodes.doctest_block) for child
                    in node.children):
                 node.replace_self(node.children)
         # combine successive doctest blocks
-        #for node in self.document.traverse(nodes.doctest_block):
+        # for node in self.document.traverse(nodes.doctest_block):
         # if node not in node.parent.children:
         # continue
         # parindex = node.parent.index(node)
@@ -173,7 +174,7 @@ class Locale(Transform):
 
         parser = RSTParser()
 
-        #phase1: replace reference ids with translated names
+        # phase1: replace reference ids with translated names
         for node, msg in extract_messages(self.document):
             msgstr = catalog.gettext(msg)
             # XXX add marker to untranslated parts
@@ -198,7 +199,7 @@ class Locale(Transform):
                 pass
             # XXX doctest and other block markup
             if not isinstance(patch, nodes.paragraph):
                 continue # skip for now
 
             processed = False # skip flag
 
@@ -281,15 +282,14 @@ class Locale(Transform):
             node.children = patch.children
             node['translated'] = True
 
-        #phase2: translation
-
+        # phase2: translation
         for node, msg in extract_messages(self.document):
             if node.get('translated', False):
                 continue
 
             msgstr = catalog.gettext(msg)
             # XXX add marker to untranslated parts
             if not msgstr or msgstr == msg: # as-of-yet untranslated
                 continue
 
             # Avoid "Literal block expected; none found." warnings.
@@ -309,12 +309,13 @@ class Locale(Transform):
                 pass
             # XXX doctest and other block markup
             if not isinstance(patch, nodes.paragraph):
                 continue # skip for now
 
             # auto-numbered foot note reference should use original 'ids'.
             def is_autonumber_footnote_ref(node):
                 return isinstance(node, nodes.footnote_reference) and \
                     node.get('auto') == 1
+
             def list_replace_or_append(lst, old, new):
                 if old in lst:
                     lst[lst.index(old)] = new
@@ -339,7 +340,7 @@ class Locale(Transform):
                 for id in new['ids']:
                     self.document.ids[id] = new
                 list_replace_or_append(
                     self.document.autofootnote_refs, old, new)
                 if refname:
                     list_replace_or_append(
                         self.document.footnote_refs.setdefault(refname, []),
@@ -404,6 +405,7 @@ class Locale(Transform):
             if len(old_refs) != len(new_refs):
                 env.warn_node('inconsistent term references in '
                               'translated message', node)
+
             def get_ref_key(node):
                 case = node["refdomain"], node["reftype"]
                 if case == ('std', 'term'):