Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
intersphinx: Define a restricted subset of Config as `_InvConfig` (#13210)
commit 02beccac0a
parent 619a10efa7
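
For orientation, here is a minimal, self-contained sketch of the pattern this commit applies: a frozen dataclass that carries only the five intersphinx-related settings, built from the full configuration object via a `from_config` classmethod. The `_InvConfig` fields mirror the diff below; `FakeConfig` is a hypothetical stand-in for `sphinx.config.Config`, used here only so the snippet runs on its own.

from __future__ import annotations

import dataclasses


@dataclasses.dataclass(frozen=True, kw_only=True, slots=True)
class _InvConfig:
    intersphinx_cache_limit: int
    intersphinx_timeout: int | float | None
    tls_verify: bool
    tls_cacerts: str | dict[str, str] | None
    user_agent: str

    @classmethod
    def from_config(cls, config: FakeConfig) -> _InvConfig:
        # Copy only the intersphinx-related values out of the full config object.
        return cls(
            intersphinx_cache_limit=config.intersphinx_cache_limit,
            intersphinx_timeout=config.intersphinx_timeout,
            tls_verify=config.tls_verify,
            tls_cacerts=config.tls_cacerts,
            user_agent=config.user_agent,
        )


class FakeConfig:
    """Hypothetical stand-in for sphinx.config.Config (illustration only)."""

    intersphinx_cache_limit = 5
    intersphinx_timeout = None
    tls_verify = True
    tls_cacerts = None
    user_agent = 'Sphinx/example'


print(_InvConfig.from_config(FakeConfig()))
# -> _InvConfig(intersphinx_cache_limit=5, intersphinx_timeout=None,
#               tls_verify=True, tls_cacerts=None, user_agent='Sphinx/example')

Passing this narrow, immutable object into the inventory-fetching helpers (as the diff below does) means they no longer depend on the full mutable `Config`.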
@@ -5,7 +5,7 @@ from __future__ import annotations
 import sys
 from pathlib import Path
 
-from sphinx.ext.intersphinx._load import _fetch_inventory
+from sphinx.ext.intersphinx._load import _fetch_inventory, _InvConfig
 
 
 def inspect_main(argv: list[str], /) -> int:
@@ -18,18 +18,20 @@ def inspect_main(argv: list[str], /) -> int:
         )
         return 1
 
-    class MockConfig:
-        intersphinx_timeout: int | None = None
-        tls_verify = False
-        tls_cacerts: str | dict[str, str] | None = None
-        user_agent: str = ''
+    filename = argv[0]
+    config = _InvConfig(
+        intersphinx_cache_limit=5,
+        intersphinx_timeout=None,
+        tls_verify=False,
+        tls_cacerts=None,
+        user_agent='',
+    )
 
     try:
-        filename = argv[0]
         inv_data = _fetch_inventory(
             target_uri='',
             inv_location=filename,
-            config=MockConfig(),  # type: ignore[arg-type]
+            config=config,
             srcdir=Path(''),
         )
         for key in sorted(inv_data or {}):
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import concurrent.futures
+import dataclasses
 import io
 import os.path
 import posixpath
@@ -169,6 +170,7 @@ def load_mappings(app: Sphinx) -> None:
         # This happens when the URI in `intersphinx_mapping` is changed.
         del intersphinx_cache[uri]
 
+    inv_config = _InvConfig.from_config(app.config)
     with concurrent.futures.ThreadPoolExecutor() as pool:
         futures = [
             pool.submit(
@@ -176,7 +178,7 @@ def load_mappings(app: Sphinx) -> None:
                 project=project,
                 cache=intersphinx_cache,
                 now=now,
-                config=app.config,
+                config=inv_config,
                 srcdir=app.srcdir,
             )
             for project in projects
@@ -201,12 +203,31 @@ def load_mappings(app: Sphinx) -> None:
             inventories.main_inventory.setdefault(objtype, {}).update(objects)
 
 
+@dataclasses.dataclass(frozen=True, kw_only=True, slots=True)
+class _InvConfig:
+    intersphinx_cache_limit: int
+    intersphinx_timeout: int | float | None
+    tls_verify: bool
+    tls_cacerts: str | dict[str, str] | None
+    user_agent: str
+
+    @classmethod
+    def from_config(cls, config: Config) -> _InvConfig:
+        return cls(
+            intersphinx_cache_limit=config.intersphinx_cache_limit,
+            intersphinx_timeout=config.intersphinx_timeout,
+            tls_verify=config.tls_verify,
+            tls_cacerts=config.tls_cacerts,
+            user_agent=config.user_agent,
+        )
+
+
 def _fetch_inventory_group(
     *,
     project: _IntersphinxProject,
     cache: dict[InventoryURI, InventoryCacheEntry],
     now: int,
-    config: Config,
+    config: _InvConfig,
     srcdir: Path,
 ) -> bool:
     if config.intersphinx_cache_limit >= 0:
@@ -283,13 +304,13 @@ def fetch_inventory(app: Sphinx, uri: InventoryURI, inv: str) -> Inventory:
     return _fetch_inventory(
         target_uri=uri,
         inv_location=inv,
-        config=app.config,
+        config=_InvConfig.from_config(app.config),
         srcdir=app.srcdir,
     )
 
 
 def _fetch_inventory(
-    *, target_uri: InventoryURI, inv_location: str, config: Config, srcdir: Path
+    *, target_uri: InventoryURI, inv_location: str, config: _InvConfig, srcdir: Path
 ) -> Inventory:
     """Fetch, parse and return an intersphinx inventory file."""
     # both *target_uri* (base URI of the links to generate)
@@ -315,7 +336,7 @@ def _fetch_inventory(
 
 
 def _fetch_inventory_url(
-    *, target_uri: InventoryURI, inv_location: str, config: Config
+    *, target_uri: InventoryURI, inv_location: str, config: _InvConfig
 ) -> tuple[bytes, str]:
     try:
         with requests.get(
@@ -25,6 +25,7 @@ from sphinx.ext.intersphinx._load import (
     _fetch_inventory,
     _fetch_inventory_group,
     _get_safe_url,
+    _InvConfig,
     _strip_basic_auth,
 )
 from sphinx.ext.intersphinx._shared import _IntersphinxProject
@@ -67,6 +68,7 @@ def set_config(app, mapping):
     app.config.intersphinx_mapping = mapping.copy()
     app.config.intersphinx_cache_limit = 0
     app.config.intersphinx_disabled_reftypes = []
+    app.config.intersphinx_timeout = None
 
 
 @mock.patch('sphinx.ext.intersphinx._load.InventoryFile')
@@ -82,7 +84,7 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
     _fetch_inventory(
         target_uri='https://hostname/',
         inv_location='https://hostname/' + INVENTORY_FILENAME,
-        config=app.config,
+        config=_InvConfig.from_config(app.config),
         srcdir=app.srcdir,
     )
     assert 'intersphinx inventory has moved' not in app.status.getvalue()
@@ -96,7 +98,7 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
     _fetch_inventory(
         target_uri='https://hostname/',
         inv_location='https://hostname/' + INVENTORY_FILENAME,
-        config=app.config,
+        config=_InvConfig.from_config(app.config),
         srcdir=app.srcdir,
     )
     assert app.status.getvalue() == (
@@ -114,7 +116,7 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
     _fetch_inventory(
         target_uri='https://hostname/',
         inv_location='https://hostname/new/' + INVENTORY_FILENAME,
-        config=app.config,
+        config=_InvConfig.from_config(app.config),
         srcdir=app.srcdir,
     )
     assert 'intersphinx inventory has moved' not in app.status.getvalue()
@@ -128,7 +130,7 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
     _fetch_inventory(
         target_uri='https://hostname/',
         inv_location='https://hostname/new/' + INVENTORY_FILENAME,
-        config=app.config,
+        config=_InvConfig.from_config(app.config),
         srcdir=app.srcdir,
     )
     assert app.status.getvalue() == (
@@ -761,6 +763,7 @@ def test_intersphinx_cache_limit(app, monkeypatch, cache_limit, expected_expired
     app.config.intersphinx_mapping = {
         'inv': (url, None),
     }
+    app.config.intersphinx_timeout = None
     # load the inventory and check if it's done correctly
     intersphinx_cache: dict[str, InventoryCacheEntry] = {
         url: ('inv', 0, {}),  # Timestamp of last cache write is zero.
@@ -785,7 +788,7 @@ def test_intersphinx_cache_limit(app, monkeypatch, cache_limit, expected_expired
             project=project,
            cache=intersphinx_cache,
            now=now,
-            config=app.config,
+            config=_InvConfig.from_config(app.config),
            srcdir=app.srcdir,
        )
        # If we hadn't mocked `_fetch_inventory`, it would've made