Fetch inventories concurrently
parent 8f554c465d
commit ea3ba6c210

AUTHORS | 2 +-
@@ -36,7 +36,7 @@ Other contributors, listed alphabetically, are:
 * Hernan Grecco -- search improvements
 * Horst Gutmann -- internationalization support
 * Martin Hans -- autodoc improvements
-* Zac Hatfield-Dodds -- doctest reporting improvements
+* Zac Hatfield-Dodds -- doctest reporting improvements, intersphinx performance
 * Doug Hellmann -- graphviz improvements
 * Tim Hoffmann -- theme improvements
 * Antti Kaihola -- doctest extension (skipif option)

CHANGES | 1 +
@@ -32,6 +32,7 @@ Features added
 * #6000: LaTeX: have backslash also be an inline literal word wrap break
   character
 * #6812: Improve a warning message when extensions are not parallel safe
+* #6818: Improve Intersphinx performance for multiple remote inventories.
 
 Bugs fixed
 ----------

sphinx/ext/intersphinx.py

@@ -23,6 +23,7 @@
     :license: BSD, see LICENSE for details.
 """
 
+import concurrent.futures
 import functools
 import posixpath
 import sys
@@ -187,21 +188,18 @@ def fetch_inventory(app: Sphinx, uri: str, inv: Any) -> Any:
     return invdata
 
 
-def load_mappings(app: Sphinx) -> None:
-    """Load all intersphinx mappings into the environment."""
-    now = int(time.time())
+def fetch_inventory_group(
+    name: str, uri: str, invs: Any, cache: Any, app: Any, now: float
+) -> bool:
     cache_time = now - app.config.intersphinx_cache_limit * 86400
-    inventories = InventoryAdapter(app.builder.env)
-    update = False
-    for key, (name, (uri, invs)) in app.config.intersphinx_mapping.items():
-        failures = []
+    failures = []
+    try:
         for inv in invs:
             if not inv:
                 inv = posixpath.join(uri, INVENTORY_FILENAME)
             # decide whether the inventory must be read: always read local
             # files; remote ones only if the cache time is expired
-            if '://' not in inv or uri not in inventories.cache \
-                    or inventories.cache[uri][1] < cache_time:
+            if '://' not in inv or uri not in cache or cache[uri][1] < cache_time:
                 safe_inv_url = _get_safe_url(inv)
                 logger.info(__('loading intersphinx inventory from %s...'), safe_inv_url)
                 try:
@@ -209,12 +207,11 @@ def load_mappings(app: Sphinx) -> None:
             except Exception as err:
                 failures.append(err.args)
                 continue
-
             if invdata:
-                inventories.cache[uri] = (name, now, invdata)
-                update = True
-                break
-
+                cache[uri] = (name, now, invdata)
+                return True
+        return False
+    finally:
         if failures == []:
             pass
         elif len(failures) < len(invs):
@@ -227,7 +224,21 @@ def load_mappings(app: Sphinx) -> None:
             logger.warning(__("failed to reach any of the inventories "
                               "with the following issues:") + "\n" + issues)
 
-    if update:
+
+def load_mappings(app: Sphinx) -> None:
+    """Load all intersphinx mappings into the environment."""
+    now = int(time.time())
+    inventories = InventoryAdapter(app.builder.env)
+
+    with concurrent.futures.ThreadPoolExecutor() as pool:
+        futures = []
+        for name, (uri, invs) in app.config.intersphinx_mapping.values():
+            futures.append(pool.submit(
+                fetch_inventory_group, name, uri, invs, inventories.cache, app, now
+            ))
+        updated = [f.result() for f in concurrent.futures.as_completed(futures)]
+
+    if any(updated):
         inventories.clear()
 
         # Duplicate values in different inventories will shadow each
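
The inventory download itself stays synchronous; the speed-up comes from fanning one fetch_inventory_group call per configured mapping out over a concurrent.futures.ThreadPoolExecutor, so the network round-trips for several remote objects.inv files overlap instead of running one after another. A minimal, self-contained sketch of that pattern follows; the fetch_one helper and the example mapping are hypothetical stand-ins for illustration, not Sphinx's actual code.

# Sketch only: fan one blocking fetch per project out over a thread pool
# and collect the per-group results as they complete.  fetch_one() and
# the mapping below are made-up stand-ins, not Sphinx code.
import concurrent.futures
import time

intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None),
    'numpy': ('https://numpy.org/doc/stable', None),
}

def fetch_one(name: str, uri: str) -> bool:
    # Pretend to download and parse objects.inv for one project,
    # then report whether anything new was loaded.
    time.sleep(0.1)
    print('loaded inventory for %s from %s' % (name, uri))
    return True

with concurrent.futures.ThreadPoolExecutor() as pool:
    futures = [pool.submit(fetch_one, name, uri)
               for name, (uri, _inv) in intersphinx_mapping.items()]
    updated = [f.result() for f in concurrent.futures.as_completed(futures)]

if any(updated):
    print('at least one inventory changed; caches would be rebuilt here')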