diff --git a/sphinx/apidoc.py b/sphinx/apidoc.py
index ca1420980..36b349eb1 100644
--- a/sphinx/apidoc.py
+++ b/sphinx/apidoc.py
@@ -91,7 +91,7 @@ def create_package_file(root, master_package, subroot, py_files, opts, subs):
                                  master_package)
         text += '\n'
 
-    # build a list of directories that are packages (they contain an INITPY file)
+    # build a list of directories that are packages (contain an INITPY file)
     subs = [sub for sub in subs if path.isfile(path.join(root, sub, INITPY))]
     # if there are some package directories, add a TOC for theses subpackages
     if subs:
diff --git a/sphinx/ext/autodoc.py b/sphinx/ext/autodoc.py
index bf3fa79b2..e00f6fd2c 100644
--- a/sphinx/ext/autodoc.py
+++ b/sphinx/ext/autodoc.py
@@ -427,7 +427,8 @@ class Documenter(object):
             if isinstance(docstring, unicode):
                 return [prepare_docstring(docstring, ignore)]
             elif docstring:
-                return [prepare_docstring(force_decode(docstring, encoding), ignore)]
+                return [prepare_docstring(force_decode(docstring, encoding),
+                                          ignore)]
         return []
 
     def process_doc(self, docstrings):
diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py
index 4bcda5c55..2952388fe 100644
--- a/sphinx/ext/doctest.py
+++ b/sphinx/ext/doctest.py
@@ -335,7 +335,8 @@ Doctest summary
             self.total_failures += res_f
             self.total_tries += res_t
         if self.cleanup_runner.tries:
-            res_f, res_t = self.cleanup_runner.summarize(self._out, verbose=True)
+            res_f, res_t = self.cleanup_runner.summarize(self._out,
+                                                         verbose=True)
             self.cleanup_failures += res_f
             self.cleanup_tries += res_t
 
diff --git a/sphinx/search/en.py b/sphinx/search/en.py
index 16e93e891..c342f39cd 100644
--- a/sphinx/search/en.py
+++ b/sphinx/search/en.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 """
-    sphinx.search_languages.en
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
+    sphinx.search.en
+    ~~~~~~~~~~~~~~~~
 
     English search language: includes the JS porter stemmer.
 
@@ -151,7 +151,8 @@ var Stemmer = function() {
     }
 
     // Step 2
-    re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+    re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|\
+ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
     if (re.test(w)) {
       var fp = re.exec(w);
       stem = fp[1];
@@ -173,7 +174,8 @@ var Stemmer = function() {
     }
 
     // Step 4
-    re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+    re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|\
+iti|ous|ive|ize)$/;
     re2 = /^(.+?)(s|t)(ion)$/;
     if (re.test(w)) {
       var fp = re.exec(w);
diff --git a/sphinx/search/ja.py b/sphinx/search/ja.py
index 18d21bf01..0a7d83a1c 100644
--- a/sphinx/search/ja.py
+++ b/sphinx/search/ja.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 """
-    sphinx.search_languages.ja
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
+    sphinx.search.ja
+    ~~~~~~~~~~~~~~~~
 
     Japanese search language: includes routine to split words.
 
diff --git a/sphinx/themes/basic/genindex-single.html b/sphinx/themes/basic/genindex-single.html
index fedd3ea0e..eff8c1ce2 100644
--- a/sphinx/themes/basic/genindex-single.html
+++ b/sphinx/themes/basic/genindex-single.html
@@ -10,7 +10,7 @@
 {% macro indexentries(firstname, links) %}
   <dt>
     {%- if links -%}
-
+
       {%- if links[0][0] %}<strong>{% endif -%}
       {{ firstname|e }}
       {%- if links[0][0] %}</strong>{% endif -%}
diff --git a/sphinx/themes/basic/genindex.html b/sphinx/themes/basic/genindex.html
index 0cabdc182..7bc002b6c 100644
--- a/sphinx/themes/basic/genindex.html
+++ b/sphinx/themes/basic/genindex.html
@@ -10,7 +10,7 @@
 {% macro indexentries(firstname, links) %}
   <dt>
     {%- if links -%}
-
+
       {%- if links[0][0] %}<strong>{% endif -%}
       {{ firstname|e }}
       {%- if links[0][0] %}</strong>{% endif -%}
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 43dad160c..ccd7a8bf2 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -1080,9 +1080,11 @@ class LaTeXTranslator(nodes.NodeVisitor):
                     self.body.append(r'\index{%s!%s%s}\index{%s!%s%s}' %
                                      (p1, p2, m, p2, p1, m))
                 elif type == 'triple':
-                    p1, p2, p3 = map(self.encode, split_into(3, 'triple', string))
+                    p1, p2, p3 = map(self.encode,
+                                     split_into(3, 'triple', string))
                     self.body.append(
-                        r'\index{%s!%s %s%s}\index{%s!%s, %s%s}\index{%s!%s %s%s}' %
+                        r'\index{%s!%s %s%s}\index{%s!%s, %s%s}'
+                        r'\index{%s!%s %s%s}' %
                         (p1, p2, p3, m, p2, p3, p1, m, p3, p1, p2, m))
                 elif type == 'see':
                     p1, p2 = map(self.encode, split_into(2, 'see', string))
@@ -1091,7 +1093,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
                     p1, p2 = map(self.encode, split_into(2, 'seealso', string))
                     self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
                 else:
-                    self.builder.warn('unknown index entry type %s found' % type)
+                    self.builder.warn(
+                        'unknown index entry type %s found' % type)
             except ValueError, err:
                 self.builder.warn(str(err))
         raise nodes.SkipNode
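Note on the sphinx/search/en.py hunks above: the porter-stemmer JavaScript in that file is stored in a regular (non-raw) Python string, so the trailing backslash used to wrap the two long regex literals acts as a string-level line continuation and the emitted JavaScript still contains each regex on a single line. That is also why the wrapped continuation lines ("+ization|..." and "+iti|...") start at column zero: any indentation there would end up inside the regex. A minimal standalone sketch of that behavior, assuming a non-raw string; this is not Sphinx code and the shortened regex is illustrative only:

# Inside a non-raw Python string literal, a backslash placed directly before
# the newline consumes the newline, so the two physical lines join with no
# extra characters in between.
wrapped = "re = /^(.+?)(ational|tional|\
ization|ation)$/;"
single_line = "re = /^(.+?)(ational|tional|ization|ation)$/;"
assert wrapped == single_line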