# -*- coding: utf-8 -*-
"""
    sphinx.ext.intersphinx
    ~~~~~~~~~~~~~~~~~~~~~~

    Insert links to objects documented in remote Sphinx documentation.

    This works as follows:

    * Each Sphinx HTML build creates a file named "objects.inv" that contains a
      mapping from object names to URIs relative to the HTML set's root.

    * Projects using the Intersphinx extension can specify links to such mapping
      files in the `intersphinx_mapping` config value.  The mapping will then be
      used to resolve otherwise missing references to objects into links to the
      other documentation.

    * By default, the mapping file is assumed to be at the same location as the
      rest of the documentation; however, the location of the mapping file can
      also be specified individually, e.g. if the docs should be buildable
      without Internet access.

    :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""
|
|
|
|
|
2015-07-25 06:03:33 -05:00
|
|
|
from __future__ import print_function
|
|
|
|
|
2008-08-04 12:31:25 -05:00
|
|
|
import time
|
2009-09-07 17:40:06 -05:00
|
|
|
import zlib
|
2010-07-28 11:54:24 -05:00
|
|
|
import codecs
|
2008-08-04 12:31:25 -05:00
|
|
|
import posixpath
|
|
|
|
from os import path
|
2012-02-26 09:49:07 -06:00
|
|
|
import re
|
2008-08-04 12:31:25 -05:00
|
|
|
|
2016-05-22 22:11:10 -05:00
|
|
|
from six import iteritems, string_types
|
2016-06-17 20:25:43 -05:00
|
|
|
from six.moves.urllib.parse import urlsplit, urlunsplit
|
2008-08-04 12:31:25 -05:00
|
|
|
from docutils import nodes
|
2013-09-16 03:34:29 -05:00
|
|
|
from docutils.utils import relative_path
|
2008-08-04 12:31:25 -05:00
|
|
|
|
2014-09-03 09:39:30 -05:00
|
|
|
import sphinx
|
2011-09-22 11:59:54 -05:00
|
|
|
from sphinx.locale import _
|
2008-12-05 05:27:08 -06:00
|
|
|
from sphinx.builders.html import INVENTORY_FILENAME
|
2016-11-16 08:23:18 -06:00
|
|
|
from sphinx.util import requests
|
2008-08-04 12:31:25 -05:00
|
|
|
|
2010-06-06 18:34:01 -05:00
|
|
|
|
|
|
|
# codecs.lookup() returns a CodecInfo tuple; index 2 is the StreamReader
# class, used to wrap a binary stream and yield decoded UTF-8 text lines.
UTF8StreamReader = codecs.lookup('utf-8')[2]
|
|
|
|
|
2009-03-19 09:49:03 -05:00
|
|
|
|
2009-09-09 14:56:53 -05:00
|
|
|
def read_inventory_v1(f, uri, join):
    """Parse a version-1 ``objects.inv`` byte stream into an inventory dict.

    Returns a mapping ``{objtype: {name: (project, version, location, '-')}}``.
    *join* is the path-join function used to combine *uri* with each entry's
    relative location.
    """
    reader = UTF8StreamReader(f)
    inventory = {}
    # the first two lines are "# Project: X" and "# Version: Y" headers;
    # both prefixes are exactly 11 characters long
    projname = next(reader).rstrip()[11:]
    version = next(reader).rstrip()[11:]
    for entry in reader:
        name, objtype, location = entry.rstrip().split(None, 2)
        target = join(uri, location)
        # version 1 did not add anchors to the location, so synthesize them
        if objtype == 'mod':
            objtype = 'py:module'
            target += '#module-' + name
        else:
            objtype = 'py:' + objtype
            target += '#' + name
        inventory.setdefault(objtype, {})[name] = (projname, version, target, '-')
    return inventory
|
|
|
|
|
|
|
|
|
2017-01-11 19:07:05 -06:00
|
|
|
def read_inventory_v2(f, uri, join, bufsize=16 * 1024):
    """Parse a version-2 (zlib-compressed) ``objects.inv`` byte stream.

    Returns ``{objtype: {name: (project, version, location, dispname)}}``.
    Raises ValueError when the stream's header does not announce zlib
    compression.
    """
    inventory = {}
    # "# Project: X" / "# Version: Y" header prefixes are 11 characters long
    projname = f.readline().rstrip()[11:].decode('utf-8')
    version = f.readline().rstrip()[11:].decode('utf-8')
    header = f.readline().decode('utf-8')
    if 'zlib' not in header:
        raise ValueError

    def decompressed_chunks():
        # stream-decompress the payload instead of loading it all at once
        decomp = zlib.decompressobj()
        while True:
            raw = f.read(bufsize)
            if not raw:
                break
            yield decomp.decompress(raw)
        yield decomp.flush()

    def lines_of(chunks):
        # reassemble decompressed chunks into complete decoded lines
        pending = b''
        for piece in chunks:
            pending += piece
            end = pending.find(b'\n')
            while end != -1:
                yield pending[:end].decode('utf-8')
                pending = pending[end + 1:]
                end = pending.find(b'\n')
        assert not pending

    # names may contain embedded spaces, hence the non-greedy first group
    entry_re = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+(\S+)\s+(.*)')
    for record in lines_of(decompressed_chunks()):
        m = entry_re.match(record.rstrip())
        if not m:
            continue
        name, objtype, prio, location, dispname = m.groups()
        if objtype == 'py:module' and objtype in inventory and \
                name in inventory[objtype]:
            # due to a bug in Sphinx 1.1 and below, two inventory entries
            # were created for Python modules; the first one is correct,
            # so skip the duplicate
            continue
        if location.endswith(u'$'):
            # a trailing '$' abbreviates "anchor equals the object name"
            location = location[:-1] + name
        location = join(uri, location)
        inventory.setdefault(objtype, {})[name] = (projname, version,
                                                   location, dispname)
    return inventory
|
|
|
|
|
|
|
|
|
2017-01-11 19:07:05 -06:00
|
|
|
def read_inventory(f, uri, join, bufsize=16 * 1024):
    """Dispatch on the inventory header line to the matching parser.

    Returns the parsed inventory dict, or None when the header line does
    not identify a known inventory format version.
    """
    header = f.readline().rstrip().decode('utf-8')
    if header == '# Sphinx inventory version 1':
        return read_inventory_v1(f, uri, join)
    if header == '# Sphinx inventory version 2':
        return read_inventory_v2(f, uri, join, bufsize=bufsize)
|
|
|
|
|
|
|
|
|
2015-10-15 16:37:55 -05:00
|
|
|
def _strip_basic_auth(url):
|
|
|
|
"""Returns *url* with basic auth credentials removed. Also returns the
|
|
|
|
basic auth username and password if they're present in *url*.
|
|
|
|
|
|
|
|
E.g.: https://user:pass@example.com => https://example.com
|
|
|
|
|
|
|
|
*url* need not include basic auth credentials.
|
|
|
|
|
|
|
|
:param url: url which may or may not contain basic auth credentials
|
|
|
|
:type url: ``str``
|
|
|
|
|
2016-08-17 22:41:38 -05:00
|
|
|
:return: *url* with any basic auth creds removed
|
|
|
|
:rtype: ``str``
|
2015-10-15 16:37:55 -05:00
|
|
|
"""
|
2016-08-17 22:41:38 -05:00
|
|
|
frags = list(urlsplit(url))
|
2015-10-15 16:37:55 -05:00
|
|
|
# swap out "user[:pass]@hostname" for "hostname"
|
2016-08-17 22:41:38 -05:00
|
|
|
if '@' in frags[1]:
|
|
|
|
frags[1] = frags[1].split('@')[1]
|
|
|
|
return urlunsplit(frags)
|
2015-10-15 16:37:55 -05:00
|
|
|
|
|
|
|
|
2016-11-16 08:23:18 -06:00
|
|
|
def _read_from_url(url, config=None):
    """Read data from *url* with an HTTP *GET*.

    This function supports fetching from resources which use basic HTTP auth as
    laid out by RFC1738 § 3.1. See § 5 for grammar definitions for URLs.

    .. seealso:

       https://www.ietf.org/rfc/rfc1738.txt

    :param url: URL of an HTTP resource
    :type url: ``str``
    :param config: Sphinx config object providing ``intersphinx_timeout``;
                   may be ``None``, in which case no timeout is applied

    :return: data read from resource described by *url*
    :rtype: ``file``-like object
    """
    # guard the attribute access: with the declared default config=None the
    # unconditional ``config.intersphinx_timeout`` raised AttributeError
    timeout = config.intersphinx_timeout if config is not None else None
    r = requests.get(url, stream=True, config=config, timeout=timeout)
    r.raise_for_status()
    # remember the final URL on the raw stream so callers can detect redirects
    r.raw.url = r.url
    return r.raw
|
2015-10-15 16:37:55 -05:00
|
|
|
|
|
|
|
|
|
|
|
def _get_safe_url(url):
|
|
|
|
"""Gets version of *url* with basic auth passwords obscured. This function
|
|
|
|
returns results suitable for printing and logging.
|
|
|
|
|
2016-08-17 10:58:17 -05:00
|
|
|
E.g.: https://user:12345@example.com => https://user@example.com
|
2015-10-15 16:37:55 -05:00
|
|
|
|
|
|
|
:param url: a url
|
|
|
|
:type url: ``str``
|
|
|
|
|
2016-08-17 10:58:17 -05:00
|
|
|
:return: *url* with password removed
|
2015-10-15 16:37:55 -05:00
|
|
|
:rtype: ``str``
|
|
|
|
"""
|
2016-08-17 11:25:29 -05:00
|
|
|
parts = urlsplit(url)
|
|
|
|
if parts.username is None:
|
|
|
|
return url
|
|
|
|
else:
|
|
|
|
frags = list(parts)
|
|
|
|
if parts.port:
|
|
|
|
frags[1] = '{0}@{1}:{2}'.format(parts.username, parts.hostname, parts.port)
|
|
|
|
else:
|
|
|
|
frags[1] = '{0}@{1}'.format(parts.username, parts.hostname)
|
|
|
|
|
|
|
|
return urlunsplit(frags)
|
2015-10-15 16:37:55 -05:00
|
|
|
|
|
|
|
|
2008-08-04 12:31:25 -05:00
|
|
|
def fetch_inventory(app, uri, inv):
    """Fetch, parse and return an intersphinx inventory file.

    Returns the inventory dict on success, or None after emitting a warning
    when the file cannot be fetched or parsed.
    """
    # both *uri* (base URI of the links to generate) and *inv* (actual
    # location of the inventory file) can be local or remote URIs
    localuri = '://' not in uri
    if not localuri:
        # case: inv URI points to remote resource; strip any existing auth
        uri = _strip_basic_auth(uri)
    try:
        if '://' in inv:
            # remote inventory: fetch over HTTP(S)
            f = _read_from_url(inv, config=app.config)
        else:
            # local inventory: resolve relative to the source directory
            f = open(path.join(app.srcdir, inv), 'rb')
    except Exception as err:
        # fetching is best-effort: warn and give up rather than abort the build
        app.warn('intersphinx inventory %r not fetchable due to '
                 '%s: %s' % (inv, err.__class__, err))
        return
    try:
        # _read_from_url attaches the final URL, so a redirect is detectable
        if hasattr(f, 'url'):
            newinv = f.url
            if inv != newinv:
                app.info('intersphinx inventory has moved: %s -> %s' % (inv, newinv))

                # if the link base was derived from the inventory location,
                # follow the redirect for the link base as well
                if uri in (inv, path.dirname(inv), path.dirname(inv) + '/'):
                    uri = path.dirname(newinv)
        with f:
            try:
                # local files need OS path joining; remote URIs need POSIX
                join = localuri and path.join or posixpath.join
                invdata = read_inventory(f, uri, join)
            except ValueError as exc:
                raise ValueError('unknown or unsupported inventory version: %r' % exc)
    except Exception as err:
        app.warn('intersphinx inventory %r not readable due to '
                 '%s: %s' % (inv, err.__class__.__name__, err))
    else:
        return invdata
|
|
|
|
|
|
|
|
|
|
|
|
def load_mappings(app):
    """Load all intersphinx mappings into the environment.

    Reads ``intersphinx_mapping`` from the config, fetches each inventory
    (respecting the on-disk cache limit for remote ones) and rebuilds the
    combined ``env.intersphinx_inventory`` / ``env.intersphinx_named_inventory``
    lookup structures when anything changed.
    """
    now = int(time.time())
    # remote inventories older than this timestamp are considered stale
    cache_time = now - app.config.intersphinx_cache_limit * 86400
    env = app.builder.env
    if not hasattr(env, 'intersphinx_cache'):
        env.intersphinx_cache = {}
        env.intersphinx_inventory = {}
        env.intersphinx_named_inventory = {}
    cache = env.intersphinx_cache
    update = False
    for key, value in iteritems(app.config.intersphinx_mapping):
        if isinstance(value, (list, tuple)):
            # new format: {name: (uri, inv)}
            name, (uri, inv) = key, value
            if not isinstance(name, string_types):
                app.warn('intersphinx identifier %r is not string. Ignored' % name)
                continue
        else:
            # old format, no name: {uri: inv}
            name, uri, inv = None, key, value
        # we can safely assume that the uri<->inv mapping is not changed
        # during partial rebuilds since a changed intersphinx_mapping
        # setting will cause a full environment reread
        if not isinstance(inv, tuple):
            invs = (inv, )
        else:
            invs = inv

        # try each candidate inventory location in order; the first one
        # that yields data wins (hence the break below)
        for inv in invs:
            if not inv:
                # None/empty means "objects.inv next to the documentation"
                inv = posixpath.join(uri, INVENTORY_FILENAME)
            # decide whether the inventory must be read: always read local
            # files; remote ones only if the cache time is expired
            if '://' not in inv or uri not in cache \
                    or cache[uri][1] < cache_time:
                # log with credentials obscured
                safe_inv_url = _get_safe_url(inv)
                app.info(
                    'loading intersphinx inventory from %s...' % safe_inv_url)
                invdata = fetch_inventory(app, uri, inv)
                if invdata:
                    cache[uri] = (name, now, invdata)
                    update = True
                    break

    if update:
        env.intersphinx_inventory = {}
        env.intersphinx_named_inventory = {}
        # Duplicate values in different inventories will shadow each
        # other; which one will override which can vary between builds
        # since they are specified using an unordered dict.  To make
        # it more consistent, we sort the named inventories and then
        # add the unnamed inventories last.  This means that the
        # unnamed inventories will shadow the named ones but the named
        # ones can still be accessed when the name is specified.
        cached_vals = list(cache.values())
        named_vals = sorted(v for v in cached_vals if v[0])
        unnamed_vals = [v for v in cached_vals if not v[0]]
        for name, _x, invdata in named_vals + unnamed_vals:
            if name:
                env.intersphinx_named_inventory[name] = invdata
            for type, objects in iteritems(invdata):
                env.intersphinx_inventory.setdefault(
                    type, {}).update(objects)
|
2008-08-04 12:31:25 -05:00
|
|
|
|
|
|
|
|
|
|
|
def missing_reference(app, env, node, contnode):
    """Attempt to resolve a missing reference via intersphinx references.

    Connected to the ``missing-reference`` event; returns a new reference
    node pointing into the remote documentation, or None when the target
    is not found in any inventory.
    """
    target = node['reftarget']
    # work out which "domain:objtype" keys could match this reference
    if node['reftype'] == 'any':
        # we search anything!
        objtypes = ['%s:%s' % (domain.name, objtype)
                    for domain in env.domains.values()
                    for objtype in domain.object_types]
        domain = None
    elif node['reftype'] == 'doc':
        domain = 'std'  # special case
        objtypes = ['std:doc']
    else:
        domain = node.get('refdomain')
        if not domain:
            # only objects in domains are in the inventory
            return
        objtypes = env.domains[domain].objtypes_for_role(node['reftype'])
        if not objtypes:
            return
        objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes]
    # first look in the combined inventory, then (if the target is prefixed
    # with a known set name) in that named inventory with the prefix removed
    to_try = [(env.intersphinx_inventory, target)]
    in_set = None
    if ':' in target:
        # first part may be the foreign doc set name
        setname, newtarget = target.split(':', 1)
        if setname in env.intersphinx_named_inventory:
            in_set = setname
            to_try.append((env.intersphinx_named_inventory[setname], newtarget))
    for inventory, target in to_try:
        for objtype in objtypes:
            if objtype not in inventory or target not in inventory[objtype]:
                continue
            proj, version, uri, dispname = inventory[objtype][target]
            if '://' not in uri and node.get('refdoc'):
                # get correct path in case of subdirectories
                uri = path.join(relative_path(node['refdoc'], '.'), uri)
            newnode = nodes.reference('', '', internal=False, refuri=uri,
                                      reftitle=_('(in %s v%s)') % (proj, version))
            if node.get('refexplicit'):
                # use whatever title was given
                newnode.append(contnode)
            elif dispname == '-' or \
                    (domain == 'std' and node['reftype'] == 'keyword'):
                # use whatever title was given, but strip prefix
                title = contnode.astext()
                if in_set and title.startswith(in_set + ':'):
                    newnode.append(contnode.__class__(title[len(in_set) + 1:],
                                                      title[len(in_set) + 1:]))
                else:
                    newnode.append(contnode)
            else:
                # else use the given display name (used for :ref:)
                newnode.append(contnode.__class__(dispname, dispname))
            return newnode
    # at least get rid of the ':' in the target if no explicit title given
    # (newtarget is guaranteed bound here because in_set implies the split ran)
    if in_set is not None and not node.get('refexplicit', True):
        if len(contnode) and isinstance(contnode[0], nodes.Text):
            contnode[0] = nodes.Text(newtarget, contnode[0].rawsource)
|
2008-08-04 12:31:25 -05:00
|
|
|
|
|
|
|
|
|
|
|
def setup(app):
    """Register the intersphinx extension: config values and event hooks."""
    # (name, default, rebuild-condition) triples
    for cfg_name, default, rebuild in (
            ('intersphinx_mapping', {}, True),
            ('intersphinx_cache_limit', 5, False),
            ('intersphinx_timeout', None, False)):
        app.add_config_value(cfg_name, default, rebuild)
    app.connect('missing-reference', missing_reference)
    app.connect('builder-inited', load_mappings)
    return {
        'version': sphinx.__display_version__,
        'parallel_read_safe': True,
    }
|
2015-07-25 06:03:33 -05:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # debug functionality to print out an inventory
    import sys

    class MockApp(object):
        # minimal stand-in for a Sphinx application object
        srcdir = ''

        def warn(self, msg):
            print(msg, file=sys.stderr)

    inv_file = sys.argv[1]
    inventory = fetch_inventory(MockApp(), '', inv_file)
    for objtype in sorted(inventory or {}):
        print(objtype)
        for name, info in sorted(inventory[objtype].items()):
            if info[3] != '-':
                display = '%-40s: ' % info[3]
            else:
                display = ''
            print('\t%-40s %s%s' % (name, display, info[2]))
|