# -*- coding: utf-8 -*-
"""
    sphinx.ext.intersphinx
    ~~~~~~~~~~~~~~~~~~~~~~

    Insert links to objects documented in remote Sphinx documentation.

    This works as follows:

    * Each Sphinx HTML build creates a file named "objects.inv" that contains a
      mapping from object names to URIs relative to the HTML set's root.

    * Projects using the Intersphinx extension can specify links to such mapping
      files in the `intersphinx_mapping` config value.  The mapping will then be
      used to resolve otherwise missing references to objects into links to the
      other documentation.

    * By default, the mapping file is assumed to be at the same location as the
      rest of the documentation; however, the location of the mapping file can
      also be specified individually, e.g. if the docs should be buildable
      without Internet access.

    :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""
|
|
|
|
|
2015-07-25 06:03:33 -05:00
|
|
|
from __future__ import print_function
|
|
|
|
|
2017-01-15 09:51:40 -06:00
|
|
|
import functools
|
2008-08-04 12:31:25 -05:00
|
|
|
import posixpath
|
2018-01-27 10:52:16 -06:00
|
|
|
import sys
|
|
|
|
import time
|
2008-08-04 12:31:25 -05:00
|
|
|
from os import path
|
2018-11-11 10:02:14 -06:00
|
|
|
from urllib.parse import urlsplit, urlunsplit
|
2008-08-04 12:31:25 -05:00
|
|
|
|
|
|
|
from docutils import nodes
|
2013-09-16 03:34:29 -05:00
|
|
|
from docutils.utils import relative_path
|
2018-11-20 21:50:08 -06:00
|
|
|
from six import text_type
|
2008-08-04 12:31:25 -05:00
|
|
|
|
2014-09-03 09:39:30 -05:00
|
|
|
import sphinx
|
2008-12-05 05:27:08 -06:00
|
|
|
from sphinx.builders.html import INVENTORY_FILENAME
|
2018-02-25 07:16:09 -06:00
|
|
|
from sphinx.locale import _, __
|
2016-12-21 22:41:56 -06:00
|
|
|
from sphinx.util import requests, logging
|
2017-03-02 07:51:42 -06:00
|
|
|
from sphinx.util.inventory import InventoryFile
|
2008-08-04 12:31:25 -05:00
|
|
|
|
2018-03-13 09:01:11 -05:00
|
|
|
if False:
|
|
|
|
# For type annotation
|
2017-03-03 09:22:17 -06:00
|
|
|
from typing import Any, Dict, IO, List, Tuple, Union # NOQA
|
2016-11-11 04:37:14 -06:00
|
|
|
from sphinx.application import Sphinx # NOQA
|
2016-11-22 22:31:02 -06:00
|
|
|
from sphinx.config import Config # NOQA
|
2016-11-11 04:37:14 -06:00
|
|
|
from sphinx.environment import BuildEnvironment # NOQA
|
2018-11-24 05:14:43 -06:00
|
|
|
from sphinx.util.typing import unicode # NOQA
|
2016-11-11 04:37:14 -06:00
|
|
|
|
2018-09-07 20:52:24 -05:00
|
|
|
Inventory = Dict[text_type, Dict[text_type, Tuple[text_type, text_type, text_type, text_type]]] # NOQA
|
2016-11-11 04:37:14 -06:00
|
|
|
|
# Module-level logger for this extension (sphinx.util.logging wrapper).
logger = logging.getLogger(__name__)
|
2010-06-06 18:34:01 -05:00
|
|
|
|
2009-03-19 09:49:03 -05:00
|
|
|
|
2018-09-11 08:48:35 -05:00
|
|
|
class InventoryAdapter:
    """Facade that stores intersphinx inventories on the build environment.

    All state lives on ``env`` so it survives environment pickling; this
    class merely provides typed accessors over those attributes.
    """

    def __init__(self, env):
        # type: (BuildEnvironment) -> None
        self.env = env

        # Lazily create the storage attributes the first time an adapter is
        # constructed for this environment.
        if not hasattr(env, 'intersphinx_cache'):
            self.env.intersphinx_cache = {}  # type: ignore
            self.env.intersphinx_inventory = {}  # type: ignore
            self.env.intersphinx_named_inventory = {}  # type: ignore

    @property
    def cache(self):
        # type: () -> Dict[unicode, Tuple[unicode, int, Inventory]]
        """Raw fetch cache: inventory URI -> (name, fetch time, inventory)."""
        return self.env.intersphinx_cache  # type: ignore

    @property
    def main_inventory(self):
        # type: () -> Inventory
        """Merged inventory used to resolve unprefixed references."""
        return self.env.intersphinx_inventory  # type: ignore

    @property
    def named_inventory(self):
        # type: () -> Dict[unicode, Inventory]
        """Per-project inventories, keyed by the mapping name."""
        return self.env.intersphinx_named_inventory  # type: ignore

    def clear(self):
        # type: () -> None
        """Drop the merged inventories; the raw fetch cache is kept."""
        self.env.intersphinx_named_inventory.clear()  # type: ignore
        self.env.intersphinx_inventory.clear()  # type: ignore
|
2017-01-29 10:55:39 -06:00
|
|
|
|
|
|
|
|
2015-10-15 16:37:55 -05:00
|
|
|
def _strip_basic_auth(url):
|
2016-11-11 04:37:14 -06:00
|
|
|
# type: (unicode) -> unicode
|
2015-10-15 16:37:55 -05:00
|
|
|
"""Returns *url* with basic auth credentials removed. Also returns the
|
|
|
|
basic auth username and password if they're present in *url*.
|
|
|
|
|
|
|
|
E.g.: https://user:pass@example.com => https://example.com
|
|
|
|
|
|
|
|
*url* need not include basic auth credentials.
|
|
|
|
|
|
|
|
:param url: url which may or may not contain basic auth credentials
|
|
|
|
:type url: ``str``
|
|
|
|
|
2016-08-17 22:41:38 -05:00
|
|
|
:return: *url* with any basic auth creds removed
|
|
|
|
:rtype: ``str``
|
2015-10-15 16:37:55 -05:00
|
|
|
"""
|
2016-08-17 22:41:38 -05:00
|
|
|
frags = list(urlsplit(url))
|
2015-10-15 16:37:55 -05:00
|
|
|
# swap out "user[:pass]@hostname" for "hostname"
|
2016-08-17 22:41:38 -05:00
|
|
|
if '@' in frags[1]:
|
|
|
|
frags[1] = frags[1].split('@')[1]
|
|
|
|
return urlunsplit(frags)
|
2015-10-15 16:37:55 -05:00
|
|
|
|
|
|
|
|
2016-11-16 08:23:18 -06:00
|
|
|
def _read_from_url(url, config=None):
    # type: (unicode, Config) -> IO
    """Reads data from *url* with an HTTP *GET*.

    This function supports fetching from resources which use basic HTTP auth as
    laid out by RFC1738 § 3.1. See § 5 for grammar definitions for URLs.

    .. seealso:

       https://www.ietf.org/rfc/rfc1738.txt

    :param url: URL of an HTTP resource
    :type url: ``str``

    :return: data read from resource described by *url*
    :rtype: ``file``-like object
    """
    # Guard against the documented default: config=None previously crashed
    # with AttributeError on ``config.intersphinx_timeout``.
    timeout = config.intersphinx_timeout if config is not None else None
    r = requests.get(url, stream=True, config=config, timeout=timeout)
    r.raise_for_status()
    # expose the final (post-redirect) URL to callers reading from r.raw
    r.raw.url = r.url
    # decode content-body based on the header.
    # ref: https://github.com/kennethreitz/requests/issues/2155
    r.raw.read = functools.partial(r.raw.read, decode_content=True)
    return r.raw
|
2015-10-15 16:37:55 -05:00
|
|
|
|
|
|
|
|
|
|
|
def _get_safe_url(url):
|
2016-11-11 04:37:14 -06:00
|
|
|
# type: (unicode) -> unicode
|
2015-10-15 16:37:55 -05:00
|
|
|
"""Gets version of *url* with basic auth passwords obscured. This function
|
|
|
|
returns results suitable for printing and logging.
|
|
|
|
|
2016-08-17 10:58:17 -05:00
|
|
|
E.g.: https://user:12345@example.com => https://user@example.com
|
2015-10-15 16:37:55 -05:00
|
|
|
|
|
|
|
:param url: a url
|
|
|
|
:type url: ``str``
|
|
|
|
|
2016-08-17 10:58:17 -05:00
|
|
|
:return: *url* with password removed
|
2015-10-15 16:37:55 -05:00
|
|
|
:rtype: ``str``
|
|
|
|
"""
|
2016-08-17 11:25:29 -05:00
|
|
|
parts = urlsplit(url)
|
|
|
|
if parts.username is None:
|
|
|
|
return url
|
|
|
|
else:
|
|
|
|
frags = list(parts)
|
|
|
|
if parts.port:
|
|
|
|
frags[1] = '{0}@{1}:{2}'.format(parts.username, parts.hostname, parts.port)
|
|
|
|
else:
|
|
|
|
frags[1] = '{0}@{1}'.format(parts.username, parts.hostname)
|
|
|
|
|
|
|
|
return urlunsplit(frags)
|
2015-10-15 16:37:55 -05:00
|
|
|
|
|
|
|
|
2008-08-04 12:31:25 -05:00
|
|
|
def fetch_inventory(app, uri, inv):
    # type: (Sphinx, unicode, Any) -> Any
    """Fetch, parse and return an intersphinx inventory file.

    Both *uri* (base URI of the links to generate) and *inv* (actual
    location of the inventory file) can be local paths or remote URLs.
    On failure the caught exception is re-raised with logger-style args
    (format string + parameters) so callers can pass them to a logger.
    """
    localuri = '://' not in uri
    if not localuri:
        # case: inv URI points to remote resource; strip any existing auth
        uri = _strip_basic_auth(uri)
    try:
        if '://' in inv:
            f = _read_from_url(inv, config=app.config)
        else:
            f = open(path.join(app.srcdir, inv), 'rb')
    except Exception as err:
        # use the exception class *name*, consistent with the "not readable"
        # handler below (previously this embedded the full class repr)
        err.args = ('intersphinx inventory %r not fetchable due to %s: %s',
                    inv, err.__class__.__name__, err)
        raise
    try:
        if hasattr(f, 'url'):
            newinv = f.url  # type: ignore
            if inv != newinv:
                logger.info('intersphinx inventory has moved: %s -> %s', inv, newinv)

                # if *uri* was derived from the inventory location, follow the
                # redirect for the link base as well
                if uri in (inv, path.dirname(inv), path.dirname(inv) + '/'):
                    uri = path.dirname(newinv)
        with f:
            try:
                # local inventories are joined with OS paths, remote with '/'
                join = localuri and path.join or posixpath.join
                invdata = InventoryFile.load(f, uri, join)
            except ValueError as exc:
                raise ValueError('unknown or unsupported inventory version: %r' % exc)
    except Exception as err:
        err.args = ('intersphinx inventory %r not readable due to %s: %s',
                    inv, err.__class__.__name__, err)
        raise
    else:
        return invdata
|
|
|
|
|
|
|
|
|
|
|
|
def load_mappings(app):
    # type: (Sphinx) -> None
    """Load all intersphinx mappings into the environment."""
    # remote inventories last fetched before ``cache_time`` are considered
    # expired and are re-fetched (limit is configured in days)
    now = int(time.time())
    cache_time = now - app.config.intersphinx_cache_limit * 86400
    inventories = InventoryAdapter(app.builder.env)
    # set to True when any inventory was (re-)fetched, so the merged
    # inventories get rebuilt at the end
    update = False
    for key, value in app.config.intersphinx_mapping.items():
        name = None  # type: unicode
        uri = None  # type: unicode
        inv = None  # type: Union[unicode, Tuple[unicode, ...]]

        if isinstance(value, (list, tuple)):
            # new format: {name: (uri, inv)}
            name, (uri, inv) = key, value
            if not isinstance(name, str):
                logger.warning(__('intersphinx identifier %r is not string. Ignored'), name)
                continue
        else:
            # old format, no name: {uri: inv}
            name, uri, inv = None, key, value
        # we can safely assume that the uri<->inv mapping is not changed
        # during partial rebuilds since a changed intersphinx_mapping
        # setting will cause a full environment reread
        if not isinstance(inv, tuple):
            # normalize a single inventory location to a 1-tuple of
            # alternative locations
            invs = (inv, )
        else:
            invs = inv  # type: ignore

        # fetch errors per alternative location; reported after all
        # alternatives have been tried
        failures = []
        for inv in invs:
            # an empty location means "next to the documentation itself"
            if not inv:
                inv = posixpath.join(uri, INVENTORY_FILENAME)
            # decide whether the inventory must be read: always read local
            # files; remote ones only if the cache time is expired
            if '://' not in inv or uri not in inventories.cache \
                    or inventories.cache[uri][1] < cache_time:
                safe_inv_url = _get_safe_url(inv)  # type: ignore
                logger.info('loading intersphinx inventory from %s...', safe_inv_url)
                try:
                    invdata = fetch_inventory(app, uri, inv)
                except Exception as err:
                    failures.append(err.args)
                    continue

                if invdata:
                    inventories.cache[uri] = (name, now, invdata)
                    update = True
                    # first working alternative wins; skip the rest
                    break

        if failures == []:
            pass
        elif len(failures) < len(invs):
            # at least one alternative worked; report the rest informationally
            logger.info("encountered some issues with some of the inventories,"
                        " but they had working alternatives:")
            for fail in failures:
                # fail is logger-style args set by fetch_inventory
                logger.info(*fail)
        else:
            # every alternative location failed
            logger.warning(__("failed to reach any of the inventories "
                              "with the following issues:"))
            for fail in failures:
                logger.warning(*fail)

    if update:
        inventories.clear()

        # Duplicate values in different inventories will shadow each
        # other; which one will override which can vary between builds
        # since they are specified using an unordered dict. To make
        # it more consistent, we sort the named inventories and then
        # add the unnamed inventories last. This means that the
        # unnamed inventories will shadow the named ones but the named
        # ones can still be accessed when the name is specified.
        cached_vals = list(inventories.cache.values())
        named_vals = sorted(v for v in cached_vals if v[0])
        unnamed_vals = [v for v in cached_vals if not v[0]]
        for name, _x, invdata in named_vals + unnamed_vals:
            if name:
                inventories.named_inventory[name] = invdata
            for type, objects in invdata.items():
                inventories.main_inventory.setdefault(type, {}).update(objects)
|
2008-08-04 12:31:25 -05:00
|
|
|
|
|
|
|
|
|
|
|
def missing_reference(app, env, node, contnode):
    # type: (Sphinx, BuildEnvironment, nodes.Element, nodes.TextElement) -> None
    """Attempt to resolve a missing reference via intersphinx references.

    Returns a new reference node on success; falls through (returning None)
    when the target is not found in any known inventory.
    """
    target = node['reftarget']
    inventories = InventoryAdapter(env)
    objtypes = None  # type: List[unicode]
    if node['reftype'] == 'any':
        # we search anything!
        objtypes = ['%s:%s' % (domain.name, objtype)
                    for domain in env.domains.values()
                    for objtype in domain.object_types]
        domain = None
    else:
        domain = node.get('refdomain')
        if not domain:
            # only objects in domains are in the inventory
            return
        objtypes = env.get_domain(domain).objtypes_for_role(node['reftype'])
        if not objtypes:
            return
        # qualify each object type with its domain to match inventory keys
        objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes]
    if 'std:cmdoption' in objtypes:
        # until Sphinx-1.6, cmdoptions are stored as std:option
        objtypes.append('std:option')
    # candidate (inventory, target) pairs, tried in order below
    to_try = [(inventories.main_inventory, target)]
    if domain:
        full_qualified_name = env.get_domain(domain).get_full_qualified_name(node)
        if full_qualified_name:
            to_try.append((inventories.main_inventory, full_qualified_name))
    in_set = None
    if ':' in target:
        # first part may be the foreign doc set name
        setname, newtarget = target.split(':', 1)
        if setname in inventories.named_inventory:
            in_set = setname
            to_try.append((inventories.named_inventory[setname], newtarget))
            if domain:
                node['reftarget'] = newtarget
                full_qualified_name = env.get_domain(domain).get_full_qualified_name(node)
                if full_qualified_name:
                    to_try.append((inventories.named_inventory[setname], full_qualified_name))
    for inventory, target in to_try:
        for objtype in objtypes:
            if objtype not in inventory or target not in inventory[objtype]:
                continue
            # inventory entry: (project name, version, URI, display name)
            proj, version, uri, dispname = inventory[objtype][target]
            if '://' not in uri and node.get('refdoc'):
                # get correct path in case of subdirectories
                uri = path.join(relative_path(node['refdoc'], '.'), uri)
            if version:
                reftitle = _('(in %s v%s)') % (proj, version)
            else:
                reftitle = _('(in %s)') % (proj,)
            newnode = nodes.reference('', '', internal=False, refuri=uri, reftitle=reftitle)
            if node.get('refexplicit'):
                # use whatever title was given
                newnode.append(contnode)
            elif dispname == '-' or \
                    (domain == 'std' and node['reftype'] == 'keyword'):
                # use whatever title was given, but strip prefix
                title = contnode.astext()
                if in_set and title.startswith(in_set + ':'):
                    newnode.append(contnode.__class__(title[len(in_set) + 1:],
                                                      title[len(in_set) + 1:]))
                else:
                    newnode.append(contnode)
            else:
                # else use the given display name (used for :ref:)
                newnode.append(contnode.__class__(dispname, dispname))
            return newnode
    # at least get rid of the ':' in the target if no explicit title given
    if in_set is not None and not node.get('refexplicit', True):
        if len(contnode) and isinstance(contnode[0], nodes.Text):
            contnode[0] = nodes.Text(newtarget, contnode[0].rawsource)
|
2008-08-04 12:31:25 -05:00
|
|
|
|
|
|
|
|
|
|
|
def setup(app):
    # type: (Sphinx) -> Dict[unicode, Any]
    """Sphinx extension entry point: register config values and event hooks."""
    for cfg_name, default, rebuild in (
            ('intersphinx_mapping', {}, True),
            ('intersphinx_cache_limit', 5, False),
            ('intersphinx_timeout', None, False),
    ):
        app.add_config_value(cfg_name, default, rebuild)
    app.connect('missing-reference', missing_reference)
    app.connect('builder-inited', load_mappings)
    return {
        'version': sphinx.__display_version__,
        'env_version': 1,
        'parallel_read_safe': True
    }
|
2015-07-25 06:03:33 -05:00
|
|
|
|
|
|
|
|
2018-02-22 09:49:08 -06:00
|
|
|
def inspect_main(argv):
    # type: (List[unicode]) -> None
    """Debug functionality to print out an inventory"""
    if not argv:
        print("Print out an inventory file.\n"
              "Error: must specify local path or URL to an inventory file.",
              file=sys.stderr)
        sys.exit(1)

    class MockConfig:
        # minimal stand-in for a Sphinx config object
        intersphinx_timeout = None  # type: int
        tls_verify = False

    class MockApp:
        # minimal stand-in for a Sphinx application object
        srcdir = ''
        config = MockConfig()

        def warn(self, msg):
            # type: (unicode) -> None
            print(msg, file=sys.stderr)

    try:
        filename = argv[0]
        invdata = fetch_inventory(MockApp(), '', filename)  # type: ignore
        for objtype in sorted(invdata or {}):
            print(objtype)
            for entry_name, info in sorted(invdata[objtype].items()):
                # info: (project, version, URI, display name); '-' means no
                # separate display name
                display = ('%-40s: ' % info[3]) if info[3] != '-' else ''
                print('\t%-40s %s%s' % (entry_name, display, info[2]))
    except ValueError as exc:
        # logger-style args produced by fetch_inventory
        print(exc.args[0] % exc.args[1:])
    except Exception as exc:
        print('Unknown error: %r' % exc)
|
2017-03-21 10:03:05 -05:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Command-line usage: print an inventory file.  The stdlib logging module
    # deliberately shadows sphinx.util.logging here; this only happens when
    # the module is run as a script.
    import logging  # type: ignore
    logging.basicConfig()  # type: ignore

    inspect_main(argv=sys.argv[1:])  # type: ignore
|