Mirror of https://github.com/sphinx-doc/sphinx.git, synced 2025-02-25 18:55:22 -06:00
Improve static typing strictness (#10569)

commit a504ac6100
parent a340427ba4
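The diff below applies a few recurring patterns rather than one localized change: parameters that default to None gain explicit Optional[...] annotations (the implicit-Optional style that strict mypy settings such as --no-implicit-optional reject), bare generics such as Dict, Pattern and PriorityQueue get type parameters, and the rebuild argument of app.add_config_value() changes from None to False. A minimal sketch of the Optional pattern, mirroring the get_target_uri() signatures changed throughout the diff (the body is illustrative only, not code from the commit):

    from typing import Optional

    # Before: `typ: str = None` relies on implicit Optional, which strict mypy rejects.
    # After: the None default is spelled out in the annotation.
    def get_target_uri(docname: str, typ: Optional[str] = None) -> str:
        # illustrative body; the real builders return builder-specific URIs
        return '' if typ is None else docname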
@@ -943,7 +943,7 @@ class Sphinx:
         """
         self.registry.add_post_transform(transform)
 
-    def add_js_file(self, filename: str, priority: int = 500,
+    def add_js_file(self, filename: Optional[str], priority: int = 500,
                     loading_method: Optional[str] = None, **kwargs: Any) -> None:
         """Register a JavaScript file to include in the HTML output.
 
@@ -301,7 +301,9 @@ class Builder:
                        summary=__('targets for %d source files that are out of date') %
                        len(to_build))
 
-    def build(self, docnames: Iterable[str], summary: str = None, method: str = 'update') -> None:  # NOQA
+    def build(
+        self, docnames: Iterable[str], summary: Optional[str] = None, method: str = 'update'
+    ) -> None:
         """Main build method.
 
         First updates the environment, and then calls :meth:`write`.
@@ -4,7 +4,7 @@ import html
 import os
 import re
 from os import path
-from typing import Any, Dict, List, NamedTuple, Set, Tuple
+from typing import Any, Dict, List, NamedTuple, Optional, Set, Tuple
 from zipfile import ZIP_DEFLATED, ZIP_STORED, ZipFile
 
 from docutils import nodes
@@ -446,7 +446,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
         pass
 
     def handle_page(self, pagename: str, addctx: Dict, templatename: str = 'page.html',
-                    outfilename: str = None, event_arg: Any = None) -> None:
+                    outfilename: Optional[str] = None, event_arg: Any = None) -> None:
         """Create a rendered page.
 
         This method is overwritten for genindex pages in order to fix href link
@@ -127,7 +127,7 @@ class ChangesBuilder(Builder):
            with open(targetfn, 'w', encoding='utf-8') as f:
                text = ''.join(hl(i + 1, line) for (i, line) in enumerate(lines))
                ctx = {
-                   'filename': self.env.doc2path(docname, None),
+                   'filename': self.env.doc2path(docname, False),
                    'text': text
                }
                f.write(self.templates.render('changes/rstsource.html', ctx))
@@ -1,7 +1,7 @@
 """Directory HTML builders."""
 
 from os import path
-from typing import Any, Dict
+from typing import Any, Dict, Optional
 
 from sphinx.application import Sphinx
 from sphinx.builders.html import StandaloneHTMLBuilder
@@ -19,7 +19,7 @@ class DirectoryHTMLBuilder(StandaloneHTMLBuilder):
     """
     name = 'dirhtml'
 
-    def get_target_uri(self, docname: str, typ: str = None) -> str:
+    def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
         if docname == 'index':
             return ''
         if docname.endswith(SEP + 'index'):
@@ -1,6 +1,6 @@
 """Do syntax checks, but no writing."""
 
-from typing import Any, Dict, Set
+from typing import Any, Dict, Optional, Set
 
 from docutils.nodes import Node
 
@@ -21,7 +21,7 @@ class DummyBuilder(Builder):
     def get_outdated_docs(self) -> Set[str]:
         return self.env.found_docs
 
-    def get_target_uri(self, docname: str, typ: str = None) -> str:
+    def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
         return ''
 
     def prepare_writing(self, docnames: Set[str]) -> None:
@@ -77,7 +77,7 @@ class Epub3Builder(_epub_base.EpubBuilder):
         self.build_toc()
         self.build_epub()
 
-    def content_metadata(self) -> Dict:
+    def content_metadata(self) -> Dict[str, Any]:
         """Create a dictionary with all metadata for the content.opf
         file properly escaped.
         """
@@ -144,11 +144,11 @@ class Epub3Builder(_epub_base.EpubBuilder):
 
         return navstack[0].children
 
-    def navigation_doc_metadata(self, navlist: List[NavPoint]) -> Dict:
+    def navigation_doc_metadata(self, navlist: List[NavPoint]) -> Dict[str, Any]:
         """Create a dictionary with all metadata for the nav.xhtml file
         properly escaped.
         """
-        metadata: Dict = {}
+        metadata = {}
         metadata['lang'] = html.escape(self.config.epub_language)
         metadata['toc_locale'] = html.escape(self.guide_titles['toc'])
         metadata['navlist'] = navlist
@@ -217,7 +217,7 @@ def validate_config_values(app: Sphinx) -> None:
 
 def convert_epub_css_files(app: Sphinx, config: Config) -> None:
     """This converts string styled epub_css_files to tuple styled one."""
-    epub_css_files: List[Tuple[str, Dict]] = []
+    epub_css_files: List[Tuple[str, Dict[str, Any]]] = []
     for entry in config.epub_css_files:
         if isinstance(entry, str):
             epub_css_files.append((entry, {}))
@@ -236,7 +236,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
     app.add_builder(Epub3Builder)
 
     # config values
-    app.add_config_value('epub_basename', lambda self: make_filename(self.project), None)
+    app.add_config_value('epub_basename', lambda self: make_filename(self.project), False)
     app.add_config_value('epub_version', 3.0, 'epub')  # experimental
     app.add_config_value('epub_theme', 'epub', 'epub')
     app.add_config_value('epub_theme_options', {}, 'epub')
@@ -5,7 +5,8 @@ from collections import OrderedDict, defaultdict
 from datetime import datetime, timedelta, tzinfo
 from os import getenv, path, walk
 from time import time
-from typing import Any, DefaultDict, Dict, Generator, Iterable, List, Set, Tuple, Union
+from typing import (Any, DefaultDict, Dict, Generator, Iterable, List, Optional, Set, Tuple,
+                    Union)
 from uuid import uuid4
 
 from docutils import nodes
@@ -78,7 +79,9 @@ class MsgOrigin:
 
 
 class GettextRenderer(SphinxRenderer):
-    def __init__(self, template_path: str = None, outdir: str = None) -> None:
+    def __init__(
+        self, template_path: Optional[str] = None, outdir: Optional[str] = None
+    ) -> None:
         self.outdir = outdir
         if template_path is None:
             template_path = path.join(package_dir, 'templates', 'gettext')
@@ -93,7 +96,7 @@ class GettextRenderer(SphinxRenderer):
         self.env.filters['e'] = escape
         self.env.filters['escape'] = escape
 
-    def render(self, filename: str, context: Dict) -> str:
+    def render(self, filename: str, context: Dict[str, Any]) -> str:
         def _relpath(s: str) -> str:
             return canon_path(relpath(s, self.outdir))
 
@@ -127,7 +130,7 @@ class I18nBuilder(Builder):
         self.tags = I18nTags()
         self.catalogs: DefaultDict[str, Catalog] = defaultdict(Catalog)
 
-    def get_target_uri(self, docname: str, typ: str = None) -> str:
+    def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
         return ''
 
     def get_outdated_docs(self) -> Set[str]:
@@ -179,10 +182,10 @@ class LocalTimeZone(tzinfo):
         super().__init__(*args, **kwargs)
         self.tzdelta = tzdelta
 
-    def utcoffset(self, dt: datetime) -> timedelta:
+    def utcoffset(self, dt: Optional[datetime]) -> timedelta:
         return self.tzdelta
 
-    def dst(self, dt: datetime) -> timedelta:
+    def dst(self, dt: Optional[datetime]) -> timedelta:
         return timedelta(0)
 
 
@@ -249,7 +252,9 @@ class MessageCatalogBuilder(I18nBuilder):
         except Exception as exc:
             raise ThemeError('%s: %r' % (template, exc)) from exc
 
-    def build(self, docnames: Iterable[str], summary: str = None, method: str = 'update') -> None:  # NOQA
+    def build(
+        self, docnames: Iterable[str], summary: Optional[str] = None, method: str = 'update'
+    ) -> None:
         self._extract_from_template()
         super().build(docnames, summary, method)
 
@@ -823,7 +823,7 @@ class StandaloneHTMLBuilder(Builder):
                 if jsfile:
                     copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))
 
-    def copy_theme_static_files(self, context: Dict) -> None:
+    def copy_theme_static_files(self, context: Dict[str, Any]) -> None:
         def onerror(filename: str, error: Exception) -> None:
             logger.warning(__('Failed to copy a file in html_static_file: %s: %r'),
                            filename, error)
@@ -4,6 +4,7 @@ import json
 import re
 import socket
 import time
+from copy import deepcopy
 from datetime import datetime, timezone
 from email.utils import parsedate_to_datetime
 from html.parser import HTMLParser
@@ -117,7 +118,7 @@ class CheckExternalLinksBuilder(DummyBuilder):
         socket.setdefaulttimeout(5.0)
 
     def process_result(self, result: CheckResult) -> None:
-        filename = self.env.doc2path(result.docname, None)
+        filename = self.env.doc2path(result.docname, False)
 
         linkstat = {"filename": filename, "lineno": result.lineno,
                     "status": result.status, "code": result.code, "uri": result.uri,
@@ -202,7 +203,7 @@ class HyperlinkAvailabilityChecker:
         self.rate_limits: Dict[str, RateLimit] = {}
         self.rqueue: Queue = Queue()
         self.workers: List[Thread] = []
-        self.wqueue: PriorityQueue = PriorityQueue()
+        self.wqueue: PriorityQueue[CheckRequest] = PriorityQueue()
 
         self.to_ignore = [re.compile(x) for x in self.config.linkcheck_ignore]
 
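The hunk just above narrows the linkcheck work queue from a bare PriorityQueue to PriorityQueue[CheckRequest]. Because the subscripted type only appears in an annotation with a value inside a method body, it is never evaluated at import time, so the change stays compatible with Pythons where queue classes are not subscriptable at runtime, while a type checker can now verify what is put on and taken off the queue. A hedged, self-contained illustration; CheckRequest's real fields differ, and the Checker class is a made-up stand-in rather than code from this commit:

    from queue import PriorityQueue
    from typing import NamedTuple


    class CheckRequest(NamedTuple):  # simplified stand-in for linkcheck's CheckRequest
        next_check: float
        uri: str


    class Checker:  # hypothetical holder, not a class from the diff
        def __init__(self) -> None:
            # Function-scope variable annotations with a value are not evaluated at
            # runtime, so subscripting PriorityQueue here is always safe.
            self.wqueue: PriorityQueue[CheckRequest] = PriorityQueue()


    checker = Checker()
    checker.wqueue.put(CheckRequest(0.0, 'https://example.org'))
    request = checker.wqueue.get()  # a type checker knows this is a CheckRequest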
@@ -267,7 +268,7 @@ class HyperlinkAvailabilityCheckWorker(Thread):
         if self.config.linkcheck_timeout:
             kwargs['timeout'] = self.config.linkcheck_timeout
 
-        def get_request_headers() -> Dict:
+        def get_request_headers() -> Dict[str, str]:
             url = urlparse(uri)
             candidates = ["%s://%s" % (url.scheme, url.netloc),
                           "%s://%s/" % (url.scheme, url.netloc),
@@ -276,7 +277,7 @@ class HyperlinkAvailabilityCheckWorker(Thread):
 
             for u in candidates:
                 if u in self.config.linkcheck_request_headers:
-                    headers = dict(DEFAULT_REQUEST_HEADERS)
+                    headers = deepcopy(DEFAULT_REQUEST_HEADERS)
                     headers.update(self.config.linkcheck_request_headers[u])
                     return headers
 
@@ -561,19 +562,19 @@ def setup(app: Sphinx) -> Dict[str, Any]:
     app.add_builder(CheckExternalLinksBuilder)
     app.add_post_transform(HyperlinkCollector)
 
-    app.add_config_value('linkcheck_ignore', [], None)
-    app.add_config_value('linkcheck_exclude_documents', [], None)
-    app.add_config_value('linkcheck_allowed_redirects', {}, None)
-    app.add_config_value('linkcheck_auth', [], None)
-    app.add_config_value('linkcheck_request_headers', {}, None)
-    app.add_config_value('linkcheck_retries', 1, None)
-    app.add_config_value('linkcheck_timeout', None, None, [int])
-    app.add_config_value('linkcheck_workers', 5, None)
-    app.add_config_value('linkcheck_anchors', True, None)
+    app.add_config_value('linkcheck_ignore', [], False)
+    app.add_config_value('linkcheck_exclude_documents', [], False)
+    app.add_config_value('linkcheck_allowed_redirects', {}, False)
+    app.add_config_value('linkcheck_auth', [], False)
+    app.add_config_value('linkcheck_request_headers', {}, False)
+    app.add_config_value('linkcheck_retries', 1, False)
+    app.add_config_value('linkcheck_timeout', None, False, [int])
+    app.add_config_value('linkcheck_workers', 5, False)
+    app.add_config_value('linkcheck_anchors', True, False)
     # Anchors starting with ! are ignored since they are
     # commonly used for dynamic pages
-    app.add_config_value('linkcheck_anchors_ignore', ["^!"], None)
-    app.add_config_value('linkcheck_rate_limit_timeout', 300.0, None)
+    app.add_config_value('linkcheck_anchors_ignore', ["^!"], False)
+    app.add_config_value('linkcheck_rate_limit_timeout', 300.0, False)
 
     app.add_event('linkcheck-process-uri')
 
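The setup() hunk above, like several later ones, changes the rebuild argument of app.add_config_value() from None to False. As far as I can tell, Sphinx normalizes boolean rebuild flags ('env' for True, the empty string for False), so False keeps the previous "no rebuild needed" behaviour; the stricter annotation simply rules out None. A short sketch of the call form after the change, taken from the diff lines themselves:

    # Inside an extension's setup() function; `app` is the Sphinx application.
    # False means changing the option does not force any rebuild.
    app.add_config_value('linkcheck_retries', 1, False)
    app.add_config_value('linkcheck_timeout', None, False, [int])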
@ -2,7 +2,7 @@
|
||||
|
||||
import warnings
|
||||
from os import path
|
||||
from typing import Any, Dict, List, Set, Tuple, Union
|
||||
from typing import Any, Dict, List, Optional, Set, Tuple, Union
|
||||
|
||||
from docutils.frontend import OptionParser
|
||||
from docutils.io import FileOutput
|
||||
@ -40,7 +40,7 @@ class ManualPageBuilder(Builder):
|
||||
def get_outdated_docs(self) -> Union[str, List[str]]:
|
||||
return 'all manpages' # for now
|
||||
|
||||
def get_target_uri(self, docname: str, typ: str = None) -> str:
|
||||
def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
|
||||
return ''
|
||||
|
||||
@progress_message(__('writing'))
|
||||
@ -111,9 +111,9 @@ def default_man_pages(config: Config) -> List[Tuple[str, str, str, List[str], in
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_builder(ManualPageBuilder)
|
||||
|
||||
app.add_config_value('man_pages', default_man_pages, None)
|
||||
app.add_config_value('man_show_urls', False, None)
|
||||
app.add_config_value('man_make_section_directory', False, None)
|
||||
app.add_config_value('man_pages', default_man_pages, False)
|
||||
app.add_config_value('man_show_urls', False, False)
|
||||
app.add_config_value('man_make_section_directory', False, False)
|
||||
|
||||
return {
|
||||
'version': 'builtin',
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""Single HTML builders."""
|
||||
|
||||
from os import path
|
||||
from typing import Any, Dict, List, Tuple, Union
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Node
|
||||
@ -27,10 +27,10 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
|
||||
|
||||
copysource = False
|
||||
|
||||
def get_outdated_docs(self) -> Union[str, List[str]]: # type: ignore
|
||||
def get_outdated_docs(self) -> Union[str, List[str]]: # type: ignore[override]
|
||||
return 'all documents'
|
||||
|
||||
def get_target_uri(self, docname: str, typ: str = None) -> str:
|
||||
def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
|
||||
if docname in self.env.all_docs:
|
||||
# all references are on the same page...
|
||||
return self.config.root_doc + self.out_suffix + \
|
||||
@ -39,7 +39,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
|
||||
# chances are this is a html_additional_page
|
||||
return docname + self.out_suffix
|
||||
|
||||
def get_relative_uri(self, from_: str, to: str, typ: str = None) -> str:
|
||||
def get_relative_uri(self, from_: str, to: str, typ: Optional[str] = None) -> str:
|
||||
# ignore source
|
||||
return self.get_target_uri(to, typ)
|
||||
|
||||
@ -113,7 +113,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
|
||||
|
||||
return {self.config.root_doc: new_fignumbers}
|
||||
|
||||
def get_doc_context(self, docname: str, body: str, metatags: str) -> Dict:
|
||||
def get_doc_context(self, docname: str, body: str, metatags: str) -> Dict[str, Any]:
|
||||
# no relation links...
|
||||
toctree = TocTree(self.env).get_toctree_for(self.config.root_doc, self, False)
|
||||
# if there is no toctree, toc is None
|
||||
|
@ -3,7 +3,7 @@
|
||||
import os
|
||||
import warnings
|
||||
from os import path
|
||||
from typing import Any, Dict, Iterable, List, Tuple, Union
|
||||
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.frontend import OptionParser
|
||||
@ -52,13 +52,13 @@ class TexinfoBuilder(Builder):
|
||||
def get_outdated_docs(self) -> Union[str, List[str]]:
|
||||
return 'all documents' # for now
|
||||
|
||||
def get_target_uri(self, docname: str, typ: str = None) -> str:
|
||||
def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
|
||||
if docname not in self.docnames:
|
||||
raise NoUri(docname, typ)
|
||||
else:
|
||||
return '%' + docname
|
||||
|
||||
def get_relative_uri(self, from_: str, to: str, typ: str = None) -> str:
|
||||
def get_relative_uri(self, from_: str, to: str, typ: Optional[str] = None) -> str:
|
||||
# ignore source path
|
||||
return self.get_target_uri(to, typ)
|
||||
|
||||
@ -202,13 +202,13 @@ def default_texinfo_documents(config: Config) -> List[Tuple[str, str, str, str,
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_builder(TexinfoBuilder)
|
||||
|
||||
app.add_config_value('texinfo_documents', default_texinfo_documents, None)
|
||||
app.add_config_value('texinfo_appendices', [], None)
|
||||
app.add_config_value('texinfo_elements', {}, None)
|
||||
app.add_config_value('texinfo_domain_indices', True, None, [list])
|
||||
app.add_config_value('texinfo_show_urls', 'footnote', None)
|
||||
app.add_config_value('texinfo_no_detailmenu', False, None)
|
||||
app.add_config_value('texinfo_cross_references', True, None)
|
||||
app.add_config_value('texinfo_documents', default_texinfo_documents, False)
|
||||
app.add_config_value('texinfo_appendices', [], False)
|
||||
app.add_config_value('texinfo_elements', {}, False)
|
||||
app.add_config_value('texinfo_domain_indices', True, False, [list])
|
||||
app.add_config_value('texinfo_show_urls', 'footnote', False)
|
||||
app.add_config_value('texinfo_no_detailmenu', False, False)
|
||||
app.add_config_value('texinfo_cross_references', True, False)
|
||||
|
||||
return {
|
||||
'version': 'builtin',
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""Plain-text Sphinx builder."""
|
||||
|
||||
from os import path
|
||||
from typing import Any, Dict, Iterator, Set, Tuple
|
||||
from typing import Any, Dict, Iterator, Optional, Set, Tuple
|
||||
|
||||
from docutils.io import StringOutput
|
||||
from docutils.nodes import Node
|
||||
@ -25,7 +25,7 @@ class TextBuilder(Builder):
|
||||
allow_parallel = True
|
||||
default_translator_class = TextTranslator
|
||||
|
||||
current_docname: str = None
|
||||
current_docname: Optional[str] = None
|
||||
|
||||
def init(self) -> None:
|
||||
# section numbers for headings in the currently visited document
|
||||
@ -49,7 +49,7 @@ class TextBuilder(Builder):
|
||||
# source doesn't exist anymore
|
||||
pass
|
||||
|
||||
def get_target_uri(self, docname: str, typ: str = None) -> str:
|
||||
def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
|
||||
return ''
|
||||
|
||||
def prepare_writing(self, docnames: Set[str]) -> None:
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""Docutils-native XML and pseudo-XML builders."""
|
||||
|
||||
from os import path
|
||||
from typing import Any, Dict, Iterator, Set, Type, Union
|
||||
from typing import Any, Dict, Iterator, Optional, Set, Type, Union
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.io import StringOutput
|
||||
@ -53,7 +53,7 @@ class XMLBuilder(Builder):
|
||||
# source doesn't exist anymore
|
||||
pass
|
||||
|
||||
def get_target_uri(self, docname: str, typ: str = None) -> str:
|
||||
def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
|
||||
return docname
|
||||
|
||||
def prepare_writing(self, docnames: Set[str]) -> None:
|
||||
|
@ -183,7 +183,7 @@ class QuickstartRenderer(SphinxRenderer):
|
||||
return super().render(template_name, context)
|
||||
|
||||
|
||||
def ask_user(d: Dict) -> None:
|
||||
def ask_user(d: Dict[str, Any]) -> None:
|
||||
"""Ask the user for quickstart values missing from *d*.
|
||||
|
||||
Values are:
|
||||
|
@ -158,7 +158,9 @@ class Config:
|
||||
self.extensions: List[str] = config.get('extensions', [])
|
||||
|
||||
@classmethod
|
||||
def read(cls, confdir: str, overrides: Dict = None, tags: Tags = None) -> "Config":
|
||||
def read(
|
||||
cls, confdir: str, overrides: Optional[Dict] = None, tags: Optional[Tags] = None
|
||||
) -> "Config":
|
||||
"""Create a Config object from configuration file."""
|
||||
filename = path.join(confdir, CONFIG_FILENAME)
|
||||
if not path.isfile(filename):
|
||||
|
@ -1,7 +1,7 @@
|
||||
import sys
|
||||
import textwrap
|
||||
from difflib import unified_diff
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Tuple
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element, Node
|
||||
@ -48,7 +48,9 @@ class Highlight(SphinxDirective):
|
||||
linenothreshold=linenothreshold)]
|
||||
|
||||
|
||||
def dedent_lines(lines: List[str], dedent: int, location: Tuple[str, int] = None) -> List[str]:
|
||||
def dedent_lines(
|
||||
lines: List[str], dedent: Optional[int], location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
if dedent is None:
|
||||
return textwrap.dedent(''.join(lines)).splitlines(True)
|
||||
|
||||
@ -186,7 +188,7 @@ class LiteralIncludeReader:
|
||||
('diff', 'end-at'),
|
||||
]
|
||||
|
||||
def __init__(self, filename: str, options: Dict, config: Config) -> None:
|
||||
def __init__(self, filename: str, options: Dict[str, Any], config: Config) -> None:
|
||||
self.filename = filename
|
||||
self.options = options
|
||||
self.encoding = options.get('encoding', config.source_encoding)
|
||||
@ -200,7 +202,9 @@ class LiteralIncludeReader:
|
||||
raise ValueError(__('Cannot use both "%s" and "%s" options') %
|
||||
(option1, option2))
|
||||
|
||||
def read_file(self, filename: str, location: Tuple[str, int] = None) -> List[str]:
|
||||
def read_file(
|
||||
self, filename: str, location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
try:
|
||||
with open(filename, encoding=self.encoding, errors='strict') as f:
|
||||
text = f.read()
|
||||
@ -216,7 +220,7 @@ class LiteralIncludeReader:
|
||||
'be wrong, try giving an :encoding: option') %
|
||||
(self.encoding, filename)) from exc
|
||||
|
||||
def read(self, location: Tuple[str, int] = None) -> Tuple[str, int]:
|
||||
def read(self, location: Optional[Tuple[str, int]] = None) -> Tuple[str, int]:
|
||||
if 'diff' in self.options:
|
||||
lines = self.show_diff()
|
||||
else:
|
||||
@ -233,14 +237,16 @@ class LiteralIncludeReader:
|
||||
|
||||
return ''.join(lines), len(lines)
|
||||
|
||||
def show_diff(self, location: Tuple[str, int] = None) -> List[str]:
|
||||
def show_diff(self, location: Optional[Tuple[str, int]] = None) -> List[str]:
|
||||
new_lines = self.read_file(self.filename)
|
||||
old_filename = self.options.get('diff')
|
||||
old_lines = self.read_file(old_filename)
|
||||
diff = unified_diff(old_lines, new_lines, old_filename, self.filename)
|
||||
return list(diff)
|
||||
|
||||
def pyobject_filter(self, lines: List[str], location: Tuple[str, int] = None) -> List[str]:
|
||||
def pyobject_filter(
|
||||
self, lines: List[str], location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
pyobject = self.options.get('pyobject')
|
||||
if pyobject:
|
||||
from sphinx.pycode import ModuleAnalyzer
|
||||
@ -258,7 +264,9 @@ class LiteralIncludeReader:
|
||||
|
||||
return lines
|
||||
|
||||
def lines_filter(self, lines: List[str], location: Tuple[str, int] = None) -> List[str]:
|
||||
def lines_filter(
|
||||
self, lines: List[str], location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
linespec = self.options.get('lines')
|
||||
if linespec:
|
||||
linelist = parselinenos(linespec, len(lines))
|
||||
@ -282,7 +290,9 @@ class LiteralIncludeReader:
|
||||
|
||||
return lines
|
||||
|
||||
def start_filter(self, lines: List[str], location: Tuple[str, int] = None) -> List[str]:
|
||||
def start_filter(
|
||||
self, lines: List[str], location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
if 'start-at' in self.options:
|
||||
start = self.options.get('start-at')
|
||||
inclusive = False
|
||||
@ -313,7 +323,9 @@ class LiteralIncludeReader:
|
||||
|
||||
return lines
|
||||
|
||||
def end_filter(self, lines: List[str], location: Tuple[str, int] = None) -> List[str]:
|
||||
def end_filter(
|
||||
self, lines: List[str], location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
if 'end-at' in self.options:
|
||||
end = self.options.get('end-at')
|
||||
inclusive = True
|
||||
@ -340,21 +352,27 @@ class LiteralIncludeReader:
|
||||
|
||||
return lines
|
||||
|
||||
def prepend_filter(self, lines: List[str], location: Tuple[str, int] = None) -> List[str]:
|
||||
def prepend_filter(
|
||||
self, lines: List[str], location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
prepend = self.options.get('prepend')
|
||||
if prepend:
|
||||
lines.insert(0, prepend + '\n')
|
||||
|
||||
return lines
|
||||
|
||||
def append_filter(self, lines: List[str], location: Tuple[str, int] = None) -> List[str]:
|
||||
def append_filter(
|
||||
self, lines: List[str], location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
append = self.options.get('append')
|
||||
if append:
|
||||
lines.append(append + '\n')
|
||||
|
||||
return lines
|
||||
|
||||
def dedent_filter(self, lines: List[str], location: Tuple[str, int] = None) -> List[str]:
|
||||
def dedent_filter(
|
||||
self, lines: List[str], location: Optional[Tuple[str, int]] = None
|
||||
) -> List[str]:
|
||||
if 'dedent' in self.options:
|
||||
return dedent_lines(lines, self.options.get('dedent'), location=location)
|
||||
else:
|
||||
|
@ -119,7 +119,7 @@ class TocTree(SphinxDirective):
|
||||
if url_re.match(ref) or ref == 'self':
|
||||
toctree['entries'].append((title, ref))
|
||||
elif docname not in self.env.found_docs:
|
||||
if excluded(self.env.doc2path(docname, None)):
|
||||
if excluded(self.env.doc2path(docname, False)):
|
||||
message = __('toctree contains reference to excluded document %r')
|
||||
subtype = 'excluded'
|
||||
else:
|
||||
|
@ -99,8 +99,8 @@ def parse_reftarget(reftarget: str, suppress_prefix: bool = False
|
||||
return reftype, reftarget, title, refspecific
|
||||
|
||||
|
||||
def type_to_xref(target: str, env: BuildEnvironment = None, suppress_prefix: bool = False
|
||||
) -> addnodes.pending_xref:
|
||||
def type_to_xref(target: str, env: Optional[BuildEnvironment] = None,
|
||||
suppress_prefix: bool = False) -> addnodes.pending_xref:
|
||||
"""Convert a type string to a cross reference node."""
|
||||
if env:
|
||||
kwargs = {'py:module': env.ref_context.get('py:module'),
|
||||
@ -239,7 +239,9 @@ def _parse_annotation(annotation: str, env: BuildEnvironment) -> List[Node]:
|
||||
return [type_to_xref(annotation, env)]
|
||||
|
||||
|
||||
def _parse_arglist(arglist: str, env: BuildEnvironment = None) -> addnodes.desc_parameterlist:
|
||||
def _parse_arglist(
|
||||
arglist: str, env: Optional[BuildEnvironment] = None
|
||||
) -> addnodes.desc_parameterlist:
|
||||
"""Parse a list of arguments using AST parser"""
|
||||
params = addnodes.desc_parameterlist(arglist)
|
||||
sig = signature_from_str('(%s)' % arglist)
|
||||
|
@ -3,7 +3,7 @@
|
||||
import re
|
||||
import unicodedata
|
||||
from itertools import groupby
|
||||
from typing import Any, Dict, List, Pattern, Tuple, cast
|
||||
from typing import Any, Dict, List, Optional, Pattern, Tuple, cast
|
||||
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.domains.index import IndexDomain
|
||||
@ -26,7 +26,7 @@ class IndexEntries:
|
||||
new: Dict[str, List] = {}
|
||||
|
||||
def add_entry(word: str, subword: str, main: str, link: bool = True,
|
||||
dic: Dict = new, key: str = None) -> None:
|
||||
dic: Dict[str, List] = new, key: Optional[str] = None) -> None:
|
||||
# Force the word to be unicode if it's a ASCII bytestring.
|
||||
# This will solve problems with unicode normalization later.
|
||||
# For instance the RFC role will add bytestrings at the moment
|
||||
@ -118,7 +118,7 @@ class IndexEntries:
|
||||
# (in module foo)
|
||||
# (in module bar)
|
||||
oldkey = ''
|
||||
oldsubitems: Dict[str, List] = None
|
||||
oldsubitems: Optional[Dict[str, List]] = None
|
||||
i = 0
|
||||
while i < len(newlist):
|
||||
key, (targets, subitems, _key) = newlist[i]
|
||||
|
@ -164,7 +164,7 @@ class TocTree:
|
||||
ref, location=toctreenode)
|
||||
except KeyError:
|
||||
# this is raised if the included file does not exist
|
||||
if excluded(self.env.doc2path(ref, None)):
|
||||
if excluded(self.env.doc2path(ref, False)):
|
||||
message = __('toctree contains reference to excluded document %r')
|
||||
else:
|
||||
message = __('toctree contains reference to nonexisting document %r')
|
||||
|
@ -1,6 +1,6 @@
|
||||
"""Contains SphinxError and a few subclasses."""
|
||||
|
||||
from typing import Any
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
class SphinxError(Exception):
|
||||
@ -39,7 +39,9 @@ class ApplicationError(SphinxError):
|
||||
class ExtensionError(SphinxError):
|
||||
"""Extension error."""
|
||||
|
||||
def __init__(self, message: str, orig_exc: Exception = None, modname: str = None) -> None:
|
||||
def __init__(
|
||||
self, message: str, orig_exc: Optional[Exception] = None, modname: Optional[str] = None
|
||||
) -> None:
|
||||
super().__init__(message)
|
||||
self.message = message
|
||||
self.orig_exc = orig_exc
|
||||
|
@ -18,7 +18,7 @@ from copy import copy
|
||||
from fnmatch import fnmatch
|
||||
from importlib.machinery import EXTENSION_SUFFIXES
|
||||
from os import path
|
||||
from typing import Any, Generator, List, Tuple
|
||||
from typing import Any, Generator, List, Optional, Tuple
|
||||
|
||||
import sphinx.locale
|
||||
from sphinx import __display_version__, package_dir
|
||||
@ -58,7 +58,7 @@ def module_join(*modnames: str) -> str:
|
||||
return '.'.join(filter(None, modnames))
|
||||
|
||||
|
||||
def is_packagedir(dirname: str = None, files: List[str] = None) -> bool:
|
||||
def is_packagedir(dirname: Optional[str] = None, files: Optional[List[str]] = None) -> bool:
|
||||
"""Check given *files* contains __init__ file."""
|
||||
if files is None and dirname is None:
|
||||
return False
|
||||
@ -88,7 +88,7 @@ def write_file(name: str, text: str, opts: Any) -> None:
|
||||
|
||||
|
||||
def create_module_file(package: str, basename: str, opts: Any,
|
||||
user_template_dir: str = None) -> None:
|
||||
user_template_dir: Optional[str] = None) -> None:
|
||||
"""Build the text of the file and write the file."""
|
||||
options = copy(OPTIONS)
|
||||
if opts.includeprivate and 'private-members' not in options:
|
||||
@ -107,7 +107,8 @@ def create_module_file(package: str, basename: str, opts: Any,
|
||||
|
||||
def create_package_file(root: str, master_package: str, subroot: str, py_files: List[str],
|
||||
opts: Any, subs: List[str], is_namespace: bool,
|
||||
excludes: List[str] = [], user_template_dir: str = None) -> None:
|
||||
excludes: List[str] = [], user_template_dir: Optional[str] = None
|
||||
) -> None:
|
||||
"""Build the text of the file and write the file."""
|
||||
# build a list of sub packages (directories containing an __init__ file)
|
||||
subpackages = [module_join(master_package, subroot, pkgname)
|
||||
@ -145,7 +146,7 @@ def create_package_file(root: str, master_package: str, subroot: str, py_files:
|
||||
|
||||
|
||||
def create_modules_toc_file(modules: List[str], opts: Any, name: str = 'modules',
|
||||
user_template_dir: str = None) -> None:
|
||||
user_template_dir: Optional[str] = None) -> None:
|
||||
"""Create the module's index."""
|
||||
modules.sort()
|
||||
prev_module = ''
|
||||
@ -231,7 +232,7 @@ def has_child_module(rootpath: str, excludes: List[str], opts: Any) -> bool:
|
||||
|
||||
|
||||
def recurse_tree(rootpath: str, excludes: List[str], opts: Any,
|
||||
user_template_dir: str = None) -> List[str]:
|
||||
user_template_dir: Optional[str] = None) -> List[str]:
|
||||
"""
|
||||
Look for every file in the directory tree and create the corresponding
|
||||
ReST files.
|
||||
|
@ -34,7 +34,6 @@ from sphinx.util.typing import stringify as stringify_typehint
|
||||
if TYPE_CHECKING:
|
||||
from sphinx.ext.autodoc.directive import DocumenterBridge
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -623,7 +622,7 @@ class Documenter:
|
||||
return False, [] # type: ignore
|
||||
# specific members given
|
||||
selected = []
|
||||
for name in self.options.members: # type: str
|
||||
for name in self.options.members:
|
||||
if name in members:
|
||||
selected.append((name, members[name].value))
|
||||
else:
|
||||
@ -1677,7 +1676,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
return False, [] # type: ignore
|
||||
# specific members given
|
||||
selected = []
|
||||
for name in self.options.members: # type: str
|
||||
for name in self.options.members:
|
||||
if name in members:
|
||||
selected.append(members[name])
|
||||
else:
|
||||
|
@ -74,6 +74,7 @@ from sphinx.ext.autodoc import INSTANCEATTR, Documenter
|
||||
from sphinx.ext.autodoc.directive import DocumenterBridge, Options
|
||||
from sphinx.ext.autodoc.importer import import_module
|
||||
from sphinx.ext.autodoc.mock import mock
|
||||
from sphinx.extension import Extension
|
||||
from sphinx.locale import __
|
||||
from sphinx.project import Project
|
||||
from sphinx.pycode import ModuleAnalyzer, PycodeError
|
||||
@ -145,10 +146,10 @@ deprecated_alias('sphinx.ext.autosummary',
|
||||
|
||||
|
||||
class FakeApplication:
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
self.doctreedir = None
|
||||
self.events = None
|
||||
self.extensions = {}
|
||||
self.extensions: Dict[str, Extension] = {}
|
||||
self.srcdir = None
|
||||
self.config = Config()
|
||||
self.project = Project(None, None)
|
||||
@ -243,7 +244,7 @@ class Autosummary(SphinxDirective):
|
||||
docname = posixpath.join(tree_prefix, real_name)
|
||||
docname = posixpath.normpath(posixpath.join(dirname, docname))
|
||||
if docname not in self.env.found_docs:
|
||||
if excluded(self.env.doc2path(docname, None)):
|
||||
if excluded(self.env.doc2path(docname, False)):
|
||||
msg = __('autosummary references excluded document %r. Ignored.')
|
||||
else:
|
||||
msg = __('autosummary: stub file not found %r. '
|
||||
|
@ -24,12 +24,12 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# utility
|
||||
def write_header(f: IO, text: str, char: str = '-') -> None:
|
||||
def write_header(f: IO[str], text: str, char: str = '-') -> None:
|
||||
f.write(text + '\n')
|
||||
f.write(char * len(text) + '\n')
|
||||
|
||||
|
||||
def compile_regex_list(name: str, exps: str) -> List[Pattern]:
|
||||
def compile_regex_list(name: str, exps: str) -> List[Pattern[str]]:
|
||||
lst = []
|
||||
for exp in exps:
|
||||
try:
|
||||
@ -53,14 +53,14 @@ class CoverageBuilder(Builder):
|
||||
pattern = path.join(self.srcdir, pattern)
|
||||
self.c_sourcefiles.extend(glob.glob(pattern))
|
||||
|
||||
self.c_regexes: List[Tuple[str, Pattern]] = []
|
||||
self.c_regexes: List[Tuple[str, Pattern[str]]] = []
|
||||
for (name, exp) in self.config.coverage_c_regexes.items():
|
||||
try:
|
||||
self.c_regexes.append((name, re.compile(exp)))
|
||||
except Exception:
|
||||
logger.warning(__('invalid regex %r in coverage_c_regexes'), exp)
|
||||
|
||||
self.c_ignorexps: Dict[str, List[Pattern]] = {}
|
||||
self.c_ignorexps: Dict[str, List[Pattern[str]]] = {}
|
||||
for (name, exps) in self.config.coverage_ignore_c_items.items():
|
||||
self.c_ignorexps[name] = compile_regex_list('coverage_ignore_c_items',
|
||||
exps)
|
||||
|
@ -9,8 +9,8 @@ import sys
|
||||
import time
|
||||
from io import StringIO
|
||||
from os import path
|
||||
from typing import (TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Sequence, Set, Tuple,
|
||||
Type)
|
||||
from typing import (TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Sequence,
|
||||
Set, Tuple, Type)
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element, Node, TextElement
|
||||
@ -225,7 +225,7 @@ class TestGroup:
|
||||
|
||||
class TestCode:
|
||||
def __init__(self, code: str, type: str, filename: str,
|
||||
lineno: int, options: Dict = None) -> None:
|
||||
lineno: int, options: Optional[Dict] = None) -> None:
|
||||
self.code = code
|
||||
self.type = type
|
||||
self.filename = filename
|
||||
@ -317,7 +317,7 @@ class DocTestBuilder(Builder):
|
||||
logger.info(text, nonl=True)
|
||||
self.outfile.write(text)
|
||||
|
||||
def get_target_uri(self, docname: str, typ: str = None) -> str:
|
||||
def get_target_uri(self, docname: str, typ: Optional[str] = None) -> str:
|
||||
return ''
|
||||
|
||||
def get_outdated_docs(self) -> Set[str]:
|
||||
@ -362,11 +362,11 @@ Doctest summary
|
||||
filename = relpath(node.source, self.env.srcdir)\
|
||||
.rsplit(':docstring of ', maxsplit=1)[0]
|
||||
except Exception:
|
||||
filename = self.env.doc2path(docname, base=None)
|
||||
filename = self.env.doc2path(docname, False)
|
||||
return filename
|
||||
|
||||
@staticmethod
|
||||
def get_line_number(node: Node) -> int:
|
||||
def get_line_number(node: Node) -> Optional[int]:
|
||||
"""Get the real line number or admit we don't know."""
|
||||
# TODO: Work out how to store or calculate real (file-relative)
|
||||
# line numbers for doctest blocks in docstrings.
|
||||
|
@ -6,7 +6,7 @@ import re
|
||||
import subprocess
|
||||
from os import path
|
||||
from subprocess import PIPE, CalledProcessError
|
||||
from typing import Any, Dict, List, Tuple
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Node
|
||||
@ -210,7 +210,7 @@ class GraphvizSimple(SphinxDirective):
|
||||
|
||||
|
||||
def render_dot(self: SphinxTranslator, code: str, options: Dict, format: str,
|
||||
prefix: str = 'graphviz', filename: str = None) -> Tuple[str, str]:
|
||||
prefix: str = 'graphviz', filename: Optional[str] = None) -> Tuple[str, str]:
|
||||
"""Render graphviz code into a PNG or PDF output file."""
|
||||
graphviz_dot = options.get('graphviz_dot', self.builder.config.graphviz_dot)
|
||||
hashkey = (code + str(options) + str(graphviz_dot) +
|
||||
|
@ -7,7 +7,7 @@ import subprocess
|
||||
import tempfile
|
||||
from os import path
|
||||
from subprocess import PIPE, CalledProcessError
|
||||
from typing import Any, Dict, List, Tuple
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element
|
||||
@ -34,7 +34,9 @@ templates_path = path.join(package_dir, 'templates', 'imgmath')
|
||||
class MathExtError(SphinxError):
|
||||
category = 'Math extension error'
|
||||
|
||||
def __init__(self, msg: str, stderr: str = None, stdout: str = None) -> None:
|
||||
def __init__(
|
||||
self, msg: str, stderr: Optional[str] = None, stdout: Optional[str] = None
|
||||
) -> None:
|
||||
if stderr:
|
||||
msg += '\n[stderr]\n' + stderr
|
||||
if stdout:
|
||||
@ -53,7 +55,7 @@ depthsvg_re = re.compile(r'.*, depth=(.*)pt')
|
||||
depthsvgcomment_re = re.compile(r'<!-- DEPTH=(-?\d+) -->')
|
||||
|
||||
|
||||
def read_svg_depth(filename: str) -> int:
|
||||
def read_svg_depth(filename: str) -> Optional[int]:
|
||||
"""Read the depth from comment at last line of SVG file
|
||||
"""
|
||||
with open(filename, encoding="utf-8") as f:
|
||||
|
@ -32,7 +32,7 @@ import builtins
|
||||
import inspect
|
||||
import re
|
||||
from importlib import import_module
|
||||
from typing import Any, Dict, Iterable, List, Tuple, cast
|
||||
from typing import Any, Dict, Iterable, List, Optional, Tuple, cast
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Node
|
||||
@ -131,8 +131,9 @@ class InheritanceGraph:
|
||||
graphviz dot graph from them.
|
||||
"""
|
||||
def __init__(self, class_names: List[str], currmodule: str, show_builtins: bool = False,
|
||||
private_bases: bool = False, parts: int = 0, aliases: Dict[str, str] = None,
|
||||
top_classes: List[Any] = []) -> None:
|
||||
private_bases: bool = False, parts: int = 0,
|
||||
aliases: Optional[Dict[str, str]] = None, top_classes: List[Any] = []
|
||||
) -> None:
|
||||
"""*class_names* is a list of child classes to show bases from.
|
||||
|
||||
If *show_builtins* is True, then Python builtins will be shown
|
||||
@ -212,7 +213,9 @@ class InheritanceGraph:
|
||||
|
||||
return list(all_classes.values())
|
||||
|
||||
def class_name(self, cls: Any, parts: int = 0, aliases: Dict[str, str] = None) -> str:
|
||||
def class_name(
|
||||
self, cls: Any, parts: int = 0, aliases: Optional[Dict[str, str]] = None
|
||||
) -> str:
|
||||
"""Given a class object, return a fully-qualified name.
|
||||
|
||||
This works for things I've tested in matplotlib so far, but may not be
|
||||
@ -256,13 +259,14 @@ class InheritanceGraph:
|
||||
'style': '"setlinewidth(0.5)"',
|
||||
}
|
||||
|
||||
def _format_node_attrs(self, attrs: Dict) -> str:
|
||||
def _format_node_attrs(self, attrs: Dict[str, Any]) -> str:
|
||||
return ','.join(['%s=%s' % x for x in sorted(attrs.items())])
|
||||
|
||||
def _format_graph_attrs(self, attrs: Dict) -> str:
|
||||
def _format_graph_attrs(self, attrs: Dict[str, Any]) -> str:
|
||||
return ''.join(['%s=%s;\n' % x for x in sorted(attrs.items())])
|
||||
|
||||
def generate_dot(self, name: str, urls: Dict = {}, env: BuildEnvironment = None,
|
||||
def generate_dot(self, name: str, urls: Dict[str, str] = {},
|
||||
env: Optional[BuildEnvironment] = None,
|
||||
graph_attrs: Dict = {}, node_attrs: Dict = {}, edge_attrs: Dict = {}
|
||||
) -> str:
|
||||
"""Generate a graphviz dot graph from the classes that were passed in
|
||||
|
@ -98,7 +98,7 @@ def _strip_basic_auth(url: str) -> str:
|
||||
return urlunsplit(frags)
|
||||
|
||||
|
||||
def _read_from_url(url: str, config: Config = None) -> IO:
|
||||
def _read_from_url(url: str, config: Optional[Config] = None) -> IO:
|
||||
"""Reads data from *url* with an HTTP *GET*.
|
||||
|
||||
This function supports fetching from resources which use basic HTTP auth as
|
||||
@ -652,7 +652,7 @@ def inspect_main(argv: List[str]) -> None:
|
||||
sys.exit(1)
|
||||
|
||||
class MockConfig:
|
||||
intersphinx_timeout: int = None
|
||||
intersphinx_timeout: Optional[int] = None
|
||||
tls_verify = False
|
||||
user_agent = None
|
||||
|
||||
|
@ -65,7 +65,7 @@ def html_visit_displaymath(self: HTMLTranslator, node: nodes.math_block) -> None
|
||||
raise nodes.SkipNode
|
||||
|
||||
|
||||
def install_mathjax(app: Sphinx, pagename: str, templatename: str, context: Dict,
|
||||
def install_mathjax(app: Sphinx, pagename: str, templatename: str, context: Dict[str, Any],
|
||||
event_arg: Any) -> None:
|
||||
if app.builder.format != 'html' or app.builder.math_renderer_name != 'mathjax': # type: ignore # NOQA
|
||||
return
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
from functools import partial
|
||||
from importlib import import_module
|
||||
from typing import Any, Dict
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from packaging import version
|
||||
from pygments import __version__ as pygmentsversion
|
||||
@ -99,7 +99,7 @@ class PygmentsBridge:
|
||||
latex_formatter = LatexFormatter
|
||||
|
||||
def __init__(self, dest: str = 'html', stylename: str = 'sphinx',
|
||||
latex_engine: str = None) -> None:
|
||||
latex_engine: Optional[str] = None) -> None:
|
||||
self.dest = dest
|
||||
self.latex_engine = latex_engine
|
||||
|
||||
@ -126,7 +126,7 @@ class PygmentsBridge:
|
||||
kwargs.update(self.formatter_args)
|
||||
return self.formatter(**kwargs)
|
||||
|
||||
def get_lexer(self, source: str, lang: str, opts: Dict = None,
|
||||
def get_lexer(self, source: str, lang: str, opts: Optional[Dict] = None,
|
||||
force: bool = False, location: Any = None) -> Lexer:
|
||||
if not opts:
|
||||
opts = {}
|
||||
@ -165,7 +165,7 @@ class PygmentsBridge:
|
||||
|
||||
return lexer
|
||||
|
||||
def highlight_block(self, source: str, lang: str, opts: Dict = None,
|
||||
def highlight_block(self, source: str, lang: str, opts: Optional[Dict] = None,
|
||||
force: bool = False, location: Any = None, **kwargs: Any) -> str:
|
||||
if not isinstance(source, str):
|
||||
source = source.decode()
|
||||
|
@ -167,7 +167,7 @@ def init_console(locale_dir: str, catalog: str) -> Tuple[NullTranslations, bool]
|
||||
"""
|
||||
try:
|
||||
# encoding is ignored
|
||||
language, _ = locale.getlocale(locale.LC_MESSAGES) # type: Tuple[Optional[str], Any]
|
||||
language, _ = locale.getlocale(locale.LC_MESSAGES)
|
||||
except AttributeError:
|
||||
# LC_MESSAGES is not always defined. Fallback to the default language
|
||||
# in case it is not.
|
||||
@ -191,7 +191,7 @@ def _lazy_translate(catalog: str, namespace: str, message: str) -> str:
|
||||
return translator.gettext(message)
|
||||
|
||||
|
||||
def get_translation(catalog: str, namespace: str = 'general') -> Callable:
|
||||
def get_translation(catalog: str, namespace: str = 'general') -> Callable[[str], str]:
|
||||
"""Get a translation function based on the *catalog* and *namespace*.
|
||||
|
||||
The extension can use this API to translate the messages on the
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""Handlers for additional ReST roles."""
|
||||
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Tuple, Type
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type
|
||||
|
||||
import docutils.parsers.rst.directives
|
||||
import docutils.parsers.rst.roles
|
||||
@ -64,7 +64,8 @@ class XRefRole(ReferenceRole):
|
||||
innernodeclass: Type[TextElement] = nodes.literal
|
||||
|
||||
def __init__(self, fix_parens: bool = False, lowercase: bool = False,
|
||||
nodeclass: Type[Element] = None, innernodeclass: Type[TextElement] = None,
|
||||
nodeclass: Optional[Type[Element]] = None,
|
||||
innernodeclass: Optional[Type[TextElement]] = None,
|
||||
warn_dangling: bool = False) -> None:
|
||||
self.fix_parens = fix_parens
|
||||
self.lowercase = lowercase
|
||||
|
@ -7,7 +7,7 @@ import os
|
||||
import sys
|
||||
import warnings
|
||||
from io import StringIO
|
||||
from typing import Any, Dict
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.cmd.build import handle_exception
|
||||
@ -90,15 +90,15 @@ class BuildDoc(Command):
|
||||
def initialize_options(self) -> None:
|
||||
self.fresh_env = self.all_files = False
|
||||
self.pdb = False
|
||||
self.source_dir: str = None
|
||||
self.build_dir: str = None
|
||||
self.source_dir: Optional[str] = None
|
||||
self.build_dir: Optional[str] = None
|
||||
self.builder = 'html'
|
||||
self.warning_is_error = False
|
||||
self.project = ''
|
||||
self.version = ''
|
||||
self.release = ''
|
||||
self.today = ''
|
||||
self.config_dir: str = None
|
||||
self.config_dir: Optional[str] = None
|
||||
self.link_index = False
|
||||
self.copyright = ''
|
||||
# Link verbosity to distutils' (which uses 1 by default).
|
||||
|
@ -1,5 +1,7 @@
|
||||
"""Utility functions for math."""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx.builders.html import HTMLTranslator
|
||||
@ -20,7 +22,7 @@ def get_node_equation_number(writer: HTMLTranslator, node: nodes.math_block) ->
|
||||
return node['number']
|
||||
|
||||
|
||||
def wrap_displaymath(text: str, label: str, numbering: bool) -> str:
|
||||
def wrap_displaymath(text: str, label: Optional[str], numbering: bool) -> str:
|
||||
def is_equation(part: str) -> str:
|
||||
return part.strip()
|
||||
|
||||
|
@ -513,7 +513,7 @@ _non_id_translate_digraphs = {
|
||||
|
||||
|
||||
def make_id(env: "BuildEnvironment", document: nodes.document,
|
||||
prefix: str = '', term: str = None) -> str:
|
||||
prefix: str = '', term: Optional[str] = None) -> str:
|
||||
"""Generate an appropriate node_id for given *prefix* and *term*."""
|
||||
node_id = None
|
||||
if prefix:
|
||||
@ -550,7 +550,8 @@ def find_pending_xref_condition(node: addnodes.pending_xref, condition: str
|
||||
|
||||
|
||||
def make_refnode(builder: "Builder", fromdocname: str, todocname: str, targetid: str,
|
||||
child: Union[Node, List[Node]], title: str = None) -> nodes.reference:
|
||||
child: Union[Node, List[Node]], title: Optional[str] = None
|
||||
) -> nodes.reference:
|
||||
"""Shortcut to create a reference node."""
|
||||
node = nodes.reference('', '', internal=True)
|
||||
if fromdocname == todocname and targetid:
|
||||
|
@ -112,7 +112,7 @@ def make_filename_from_project(project: str) -> str:
|
||||
return make_filename(project_suffix_re.sub('', project)).lower()
|
||||
|
||||
|
||||
def relpath(path: str, start: str = os.curdir) -> str:
|
||||
def relpath(path: str, start: Optional[str] = os.curdir) -> str:
|
||||
"""Return a relative filepath to *path* either from the current directory or
|
||||
from an optional *start* directory.
|
||||
|
||||
|
@ -81,7 +81,9 @@ class ParallelTasks:
|
||||
logging.convert_serializable(collector.logs)
|
||||
pipe.send((failed, collector.logs, ret))
|
||||
|
||||
def add_task(self, task_func: Callable, arg: Any = None, result_func: Callable = None) -> None: # NOQA
|
||||
def add_task(
|
||||
self, task_func: Callable, arg: Any = None, result_func: Optional[Callable] = None
|
||||
) -> None:
|
||||
tid = self._taskid
|
||||
self._taskid += 1
|
||||
self._result_funcs[tid] = result_func or (lambda arg, result: None)
|
||||
|
@ -8,7 +8,7 @@ from sphinx.deprecation import RemovedInSphinx70Warning
|
||||
|
||||
|
||||
class PorterStemmer:
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
warnings.warn(f"{self.__class__.__name__} is deprecated, use "
|
||||
"snowballstemmer.stemmer('porter') instead.",
|
||||
RemovedInSphinx70Warning, stacklevel=2)
|
||||
@ -22,7 +22,7 @@ class PorterStemmer:
|
||||
|
||||
|
||||
class BaseStemmer:
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
warnings.warn(f"{self.__class__.__name__} is deprecated, use "
|
||||
"snowballstemmer.stemmer('porter') instead.",
|
||||
RemovedInSphinx70Warning, stacklevel=3)
|
||||
@ -32,7 +32,7 @@ class BaseStemmer:
|
||||
|
||||
|
||||
class PyStemmer(BaseStemmer):
|
||||
def __init__(self): # NoQA
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.stemmer = snowballstemmer.stemmer('porter')
|
||||
|
||||
@ -44,7 +44,7 @@ class PyStemmer(BaseStemmer):
|
||||
|
||||
|
||||
class StandardStemmer(BaseStemmer):
|
||||
def __init__(self): # NoQA
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.stemmer = snowballstemmer.stemmer('porter')
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
from typing import Iterator, List
|
||||
from typing import Iterator, List, Optional
|
||||
|
||||
from jinja2 import nodes
|
||||
from jinja2.environment import Environment
|
||||
@ -35,7 +35,7 @@ class BooleanParser(Parser):
|
||||
|
||||
|
||||
class Tags:
|
||||
def __init__(self, tags: List[str] = None) -> None:
|
||||
def __init__(self, tags: Optional[List[str]] = None) -> None:
|
||||
self.tags = dict.fromkeys(tags or [], True)
|
||||
|
||||
def has(self, tag: str) -> bool:
|
||||
|
@ -3,7 +3,7 @@
|
||||
import os
|
||||
from functools import partial
|
||||
from os import path
|
||||
from typing import Callable, Dict, List, Tuple, Union
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
||||
|
||||
from jinja2 import TemplateNotFound
|
||||
from jinja2.environment import Environment
|
||||
@ -17,15 +17,15 @@ from sphinx.util import rst, texescape
|
||||
|
||||
|
||||
class BaseRenderer:
|
||||
def __init__(self, loader: BaseLoader = None) -> None:
|
||||
def __init__(self, loader: Optional[BaseLoader] = None) -> None:
|
||||
self.env = SandboxedEnvironment(loader=loader, extensions=['jinja2.ext.i18n'])
|
||||
self.env.filters['repr'] = repr
|
||||
self.env.install_gettext_translations(get_translator())
|
||||
|
||||
def render(self, template_name: str, context: Dict) -> str:
|
||||
def render(self, template_name: str, context: Dict[str, Any]) -> str:
|
||||
return self.env.get_template(template_name).render(context)
|
||||
|
||||
def render_string(self, source: str, context: Dict) -> str:
|
||||
def render_string(self, source: str, context: Dict[str, Any]) -> str:
|
||||
return self.env.from_string(source).render(context)
|
||||
|
||||
|
||||
@ -41,25 +41,27 @@ class FileRenderer(BaseRenderer):
|
||||
super().__init__(loader)
|
||||
|
||||
@classmethod
|
||||
def render_from_file(cls, filename: str, context: Dict) -> str:
|
||||
def render_from_file(cls, filename: str, context: Dict[str, Any]) -> str:
|
||||
dirname = os.path.dirname(filename)
|
||||
basename = os.path.basename(filename)
|
||||
return cls(dirname).render(basename, context)
|
||||
|
||||
|
||||
class SphinxRenderer(FileRenderer):
|
||||
def __init__(self, template_path: Union[str, List[str]] = None) -> None:
|
||||
def __init__(self, template_path: Union[None, str, List[str]] = None) -> None:
|
||||
if template_path is None:
|
||||
template_path = os.path.join(package_dir, 'templates')
|
||||
super().__init__(template_path)
|
||||
|
||||
@classmethod
|
||||
def render_from_file(cls, filename: str, context: Dict) -> str:
|
||||
def render_from_file(cls, filename: str, context: Dict[str, Any]) -> str:
|
||||
return FileRenderer.render_from_file(filename, context)
|
||||
|
||||
|
||||
class LaTeXRenderer(SphinxRenderer):
|
||||
def __init__(self, template_path: str = None, latex_engine: str = None) -> None:
|
||||
def __init__(
|
||||
self, template_path: Optional[str] = None, latex_engine: Optional[str] = None
|
||||
) -> None:
|
||||
if template_path is None:
|
||||
template_path = os.path.join(package_dir, 'templates', 'latex')
|
||||
super().__init__(template_path)
|
||||
@ -81,7 +83,9 @@ class LaTeXRenderer(SphinxRenderer):
|
||||
|
||||
|
||||
class ReSTRenderer(SphinxRenderer):
|
||||
def __init__(self, template_path: Union[str, List[str]] = None, language: str = None) -> None: # NOQA
|
||||
def __init__(
|
||||
self, template_path: Union[None, str, List[str]] = None, language: Optional[str] = None
|
||||
) -> None:
|
||||
super().__init__(template_path)
|
||||
|
||||
# add language to environment
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""TeX escaping helper."""
|
||||
|
||||
import re
|
||||
from typing import Dict
|
||||
from typing import Dict, Optional
|
||||
|
||||
tex_replacements = [
|
||||
# map TeX special chars
|
||||
@ -100,7 +100,7 @@ _tex_hlescape_map: Dict[int, str] = {}
|
||||
_tex_hlescape_map_without_unicode: Dict[int, str] = {}
|
||||
|
||||
|
||||
def escape(s: str, latex_engine: str = None) -> str:
|
||||
def escape(s: str, latex_engine: Optional[str] = None) -> str:
|
||||
"""Escape text for LaTeX output."""
|
||||
if latex_engine in ('lualatex', 'xelatex'):
|
||||
# unicode based LaTeX engine
|
||||
@ -109,7 +109,7 @@ def escape(s: str, latex_engine: str = None) -> str:
|
||||
return s.translate(_tex_escape_map)
|
||||
|
||||
|
||||
def hlescape(s: str, latex_engine: str = None) -> str:
|
||||
def hlescape(s: str, latex_engine: Optional[str] = None) -> str:
|
||||
"""Escape text for LaTeX highlighter."""
|
||||
if latex_engine in ('lualatex', 'xelatex'):
|
||||
# unicode based LaTeX engine
|
||||
|
@ -69,7 +69,9 @@ InventoryItem = Tuple[str, str, str, str]
|
||||
Inventory = Dict[str, Dict[str, InventoryItem]]
|
||||
|
||||
|
||||
def get_type_hints(obj: Any, globalns: Dict = None, localns: Dict = None) -> Dict[str, Any]:
|
||||
def get_type_hints(
|
||||
obj: Any, globalns: Optional[Dict[str, Any]] = None, localns: Optional[Dict] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Return a dictionary containing type hints for a function, method, module or class object.
|
||||
|
||||
This is a simple wrapper of `typing.get_type_hints()` that does not raise an error on
|
||||
|
@ -837,13 +837,13 @@ class HTML5Translator(SphinxTranslator, BaseTranslator):
|
||||
node['ids'].remove(id)
|
||||
|
||||
@property
|
||||
def _fieldlist_row_index(self):
|
||||
def _fieldlist_row_index(self) -> int:
|
||||
warnings.warn('_fieldlist_row_index is deprecated',
|
||||
RemovedInSphinx60Warning, stacklevel=2)
|
||||
return self._fieldlist_row_indices[-1]
|
||||
|
||||
@property
|
||||
def _table_row_index(self):
|
||||
def _table_row_index(self) -> int:
|
||||
warnings.warn('_table_row_index is deprecated',
|
||||
RemovedInSphinx60Warning, stacklevel=2)
|
||||
return self._table_row_indices[-1]
|
||||
|
@ -8,7 +8,7 @@ import re
|
||||
import warnings
|
||||
from collections import defaultdict
|
||||
from os import path
|
||||
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Set, Tuple, cast
|
||||
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple, cast
|
||||
|
||||
from docutils import nodes, writers
|
||||
from docutils.nodes import Element, Node, Text
|
||||
@ -172,7 +172,9 @@ class Table:
|
||||
assert self.cells[(self.row + row, self.col + col)] == 0
|
||||
self.cells[(self.row + row, self.col + col)] = self.cell_id
|
||||
|
||||
def cell(self, row: int = None, col: int = None) -> "TableCell":
|
||||
def cell(
|
||||
self, row: Optional[int] = None, col: Optional[int] = None
|
||||
) -> Optional["TableCell"]:
|
||||
"""Returns a cell object (i.e. rectangular area) containing given position.
|
||||
|
||||
If no option arguments: ``row`` or ``col`` are given, the current position;
|
||||
@ -496,7 +498,7 @@ class LaTeXTranslator(SphinxTranslator):
|
||||
return renderer.render(template_name, variables)
|
||||
|
||||
@property
|
||||
def table(self) -> Table:
|
||||
def table(self) -> Optional[Table]:
|
||||
"""Get current table."""
|
||||
if self.tables:
|
||||
return self.tables[-1]
|
||||
|
@ -3,8 +3,8 @@
|
||||
import re
|
||||
import textwrap
|
||||
from os import path
|
||||
from typing import (TYPE_CHECKING, Any, Dict, Iterable, Iterator, List, Pattern, Set, Tuple,
|
||||
Union, cast)
|
||||
from typing import (TYPE_CHECKING, Any, Dict, Iterable, Iterator, List, Optional, Pattern, Set,
|
||||
Tuple, Union, cast)
|
||||
|
||||
from docutils import nodes, writers
|
||||
from docutils.nodes import Element, Node, Text
|
||||
@ -91,7 +91,7 @@ def find_subsections(section: Element) -> List[nodes.section]:
|
||||
return result
|
||||
|
||||
|
||||
def smart_capwords(s: str, sep: str = None) -> str:
|
||||
def smart_capwords(s: str, sep: Optional[str] = None) -> str:
|
||||
"""Like string.capwords() but does not capitalize words that already
|
||||
contain a capital letter."""
|
||||
words = s.split(sep)
|
||||
@ -173,7 +173,7 @@ class TexinfoTranslator(SphinxTranslator):
|
||||
self.body: List[str] = []
|
||||
self.context: List[str] = []
|
||||
self.descs: List[addnodes.desc] = []
|
||||
self.previous_section: nodes.section = None
|
||||
self.previous_section: Optional[nodes.section] = None
|
||||
self.section_level = 0
|
||||
self.seen_title = False
|
||||
self.next_section_ids: Set[str] = set()
|
||||
@@ -184,7 +184,7 @@ class TexinfoTranslator(SphinxTranslator):
         self.in_footnote = 0
         self.in_samp = 0
         self.handled_abbrs: Set[str] = set()
-        self.colwidths: List[int] = None
+        self.colwidths: Optional[List[int]] = None
 
     def finish(self) -> None:
         if self.previous_section is None: