Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

intersphinx: Add a type for project data (#12657)

commit 46671f2e10
parent 34bc4e69bb
@@ -21,7 +21,6 @@ from __future__ import annotations
 __all__ = (
     'InventoryAdapter',
     'fetch_inventory',
-    'fetch_inventory_group',
     'load_mappings',
     'validate_intersphinx_mapping',
     'IntersphinxRoleResolver',
@@ -42,7 +41,6 @@ import sphinx
 from sphinx.ext.intersphinx._cli import inspect_main
 from sphinx.ext.intersphinx._load import (
     fetch_inventory,
-    fetch_inventory_group,
     load_mappings,
     validate_intersphinx_mapping,
 )
@@ -4,7 +4,7 @@ from __future__ import annotations

 import sys

-from sphinx.ext.intersphinx._load import fetch_inventory
+from sphinx.ext.intersphinx._load import _fetch_inventory


 def inspect_main(argv: list[str], /) -> int:
@@ -21,13 +21,14 @@ def inspect_main(argv: list[str], /) -> int:
         tls_cacerts: str | dict[str, str] | None = None
         user_agent: str = ''

-    class MockApp:
-        srcdir = ''
-        config = MockConfig()
-
     try:
         filename = argv[0]
-        inv_data = fetch_inventory(MockApp(), '', filename)  # type: ignore[arg-type]
+        inv_data = _fetch_inventory(
+            target_uri='',
+            inv_location=filename,
+            config=MockConfig(),  # type: ignore[arg-type]
+            srcdir='',  # type: ignore[arg-type]
+        )
         for key in sorted(inv_data or {}):
             print(key)
             inv_entries = sorted(inv_data[key].items())
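Note: inspect_main is the helper behind "python -m sphinx.ext.intersphinx"; the hunk above only swaps its internals from fetch_inventory to the keyword-only _fetch_inventory, so the command-line behaviour is unchanged. A minimal sketch of driving it directly (the objects.inv path is a hypothetical local file, not something taken from this commit):

    from sphinx.ext.intersphinx._cli import inspect_main

    # prints every object key found in the given inventory and returns an exit code
    exit_code = inspect_main(['objects.inv'])  # assumes a local objects.inv file exists
    print('exit code:', exit_code)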
@@ -13,12 +13,13 @@ from urllib.parse import urlsplit, urlunsplit

 from sphinx.builders.html import INVENTORY_FILENAME
 from sphinx.errors import ConfigError
-from sphinx.ext.intersphinx._shared import LOGGER, InventoryAdapter
+from sphinx.ext.intersphinx._shared import LOGGER, InventoryAdapter, _IntersphinxProject
 from sphinx.locale import __
 from sphinx.util import requests
 from sphinx.util.inventory import InventoryFile

 if TYPE_CHECKING:
+    from pathlib import Path
     from typing import IO

     from sphinx.application import Sphinx
@@ -139,8 +140,17 @@ def load_mappings(app: Sphinx) -> None:
     intersphinx_cache: dict[InventoryURI, InventoryCacheEntry] = inventories.cache
     intersphinx_mapping: IntersphinxMapping = app.config.intersphinx_mapping

-    expected_uris = {uri for _name, (uri, _invs) in intersphinx_mapping.values()}
-
+    projects = []
+    for name, (uri, locations) in intersphinx_mapping.values():
+        try:
+            project = _IntersphinxProject(name=name, target_uri=uri, locations=locations)
+        except ValueError as err:
+            msg = __('An invalid intersphinx_mapping entry was added after normalisation.')
+            raise ConfigError(msg) from err
+        else:
+            projects.append(project)
+
+    expected_uris = {project.target_uri for project in projects}
     for uri in frozenset(intersphinx_cache):
         if intersphinx_cache[uri][0] not in intersphinx_mapping:
             # Remove all cached entries that are no longer in `intersphinx_mapping`.
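Note: the values of the normalised intersphinx_mapping are (name, (target URI, locations)) tuples, which the loop above turns into _IntersphinxProject instances, failing early with a ConfigError on malformed entries. A small sketch with a hypothetical entry (the project name and URL are illustrative only, and it assumes a Sphinx checkout that contains this commit):

    from sphinx.ext.intersphinx._shared import _IntersphinxProject

    # one hypothetical normalised mapping value: (name, (target URI, locations))
    name, (uri, locations) = 'python', ('https://docs.python.org/3', (None,))
    project = _IntersphinxProject(name=name, target_uri=uri, locations=locations)
    assert project.target_uri == 'https://docs.python.org/3'
    assert project.locations == (None,)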
@@ -153,8 +163,15 @@ def load_mappings(app: Sphinx) -> None:

     with concurrent.futures.ThreadPoolExecutor() as pool:
         futures = [
-            pool.submit(fetch_inventory_group, name, uri, invs, intersphinx_cache, app, now)
-            for name, (uri, invs) in app.config.intersphinx_mapping.values()
+            pool.submit(
+                _fetch_inventory_group,
+                project=project,
+                cache=intersphinx_cache,
+                now=now,
+                config=app.config,
+                srcdir=app.srcdir,
+            )
+            for project in projects
         ]
         updated = [f.result() for f in concurrent.futures.as_completed(futures)]

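Note: the executor now receives keyword arguments instead of the previous positional tuple; ThreadPoolExecutor.submit forwards them unchanged to the callable. A standard-library-only sketch of the same pattern (the fetch function below is a stand-in, not a Sphinx API):

    import concurrent.futures

    def fetch(*, project: str, now: int) -> str:
        # stand-in for _fetch_inventory_group; it just echoes its keyword arguments
        return f'{project}@{now}'

    with concurrent.futures.ThreadPoolExecutor() as pool:
        futures = [pool.submit(fetch, project=p, now=0) for p in ('python', 'numpy')]
        results = [f.result() for f in concurrent.futures.as_completed(futures)]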
@@ -176,43 +193,52 @@ def load_mappings(app: Sphinx) -> None:
             inventories.main_inventory.setdefault(objtype, {}).update(objects)


-def fetch_inventory_group(
-    name: InventoryName,
-    uri: InventoryURI,
-    invs: tuple[InventoryLocation, ...],
+def _fetch_inventory_group(
+    *,
+    project: _IntersphinxProject,
     cache: dict[InventoryURI, InventoryCacheEntry],
-    app: Sphinx,
     now: int,
+    config: Config,
+    srcdir: Path,
 ) -> bool:
-    cache_time = now - app.config.intersphinx_cache_limit * 86400
+    cache_time = now - config.intersphinx_cache_limit * 86400

     updated = False
     failures = []

-    for location in invs:
+    for location in project.locations:
         # location is either None or a non-empty string
-        inv = f'{uri}/{INVENTORY_FILENAME}' if location is None else location
+        inv = f'{project.target_uri}/{INVENTORY_FILENAME}' if location is None else location

         # decide whether the inventory must be read: always read local
         # files; remote ones only if the cache time is expired
-        if '://' not in inv or uri not in cache or cache[uri][1] < cache_time:
+        if (
+            '://' not in inv
+            or project.target_uri not in cache
+            or cache[project.target_uri][1] < cache_time
+        ):
             LOGGER.info(__("loading intersphinx inventory '%s' from %s ..."),
-                        name, _get_safe_url(inv))
+                        project.name, _get_safe_url(inv))

             try:
-                invdata = fetch_inventory(app, uri, inv)
+                invdata = _fetch_inventory(
+                    target_uri=project.target_uri,
+                    inv_location=inv,
+                    config=config,
+                    srcdir=srcdir,
+                )
             except Exception as err:
                 failures.append(err.args)
                 continue

             if invdata:
-                cache[uri] = name, now, invdata
+                cache[project.target_uri] = project.name, now, invdata
                 updated = True
                 break

     if not failures:
         pass
-    elif len(failures) < len(invs):
+    elif len(failures) < len(project.locations):
         LOGGER.info(__('encountered some issues with some of the inventories,'
                        ' but they had working alternatives:'))
         for fail in failures:
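Note: cache_time converts intersphinx_cache_limit (a value in days) into a cut-off timestamp; remote inventories whose cached entry is older than the cut-off are re-fetched, while local files are always re-read. A worked example, assuming the documented default of five days:

    # sketch: with intersphinx_cache_limit = 5 (days), entries older than
    # 5 * 86400 = 432000 seconds are considered expired
    now = 1_700_000_000                     # example UNIX timestamp
    cache_time = now - 5 * 86400            # == 1_699_568_000
    cached_at = 1_699_000_000
    needs_refetch = cached_at < cache_time  # True: the cached entry is too old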
@@ -226,36 +252,54 @@ def fetch_inventory_group(

 def fetch_inventory(app: Sphinx, uri: InventoryURI, inv: str) -> Inventory:
     """Fetch, parse and return an intersphinx inventory file."""
-    # both *uri* (base URI of the links to generate) and *inv* (actual
-    # location of the inventory file) can be local or remote URIs
-    if '://' in uri:
+    return _fetch_inventory(
+        target_uri=uri,
+        inv_location=inv,
+        config=app.config,
+        srcdir=app.srcdir,
+    )
+
+
+def _fetch_inventory(
+    *, target_uri: InventoryURI, inv_location: str, config: Config, srcdir: Path,
+) -> Inventory:
+    """Fetch, parse and return an intersphinx inventory file."""
+    # both *target_uri* (base URI of the links to generate)
+    # and *inv_location* (actual location of the inventory file)
+    # can be local or remote URIs
+    if '://' in target_uri:
         # case: inv URI points to remote resource; strip any existing auth
-        uri = _strip_basic_auth(uri)
+        target_uri = _strip_basic_auth(target_uri)
     try:
-        if '://' in inv:
-            f = _read_from_url(inv, config=app.config)
+        if '://' in inv_location:
+            f = _read_from_url(inv_location, config=config)
         else:
-            f = open(path.join(app.srcdir, inv), 'rb')  # NoQA: SIM115
+            f = open(path.join(srcdir, inv_location), 'rb')  # NoQA: SIM115
     except Exception as err:
         err.args = ('intersphinx inventory %r not fetchable due to %s: %s',
-                    inv, err.__class__, str(err))
+                    inv_location, err.__class__, str(err))
         raise
     try:
         if hasattr(f, 'url'):
-            newinv = f.url
-            if inv != newinv:
-                LOGGER.info(__('intersphinx inventory has moved: %s -> %s'), inv, newinv)
+            new_inv_location = f.url
+            if inv_location != new_inv_location:
+                msg = __('intersphinx inventory has moved: %s -> %s')
+                LOGGER.info(msg, inv_location, new_inv_location)

-                if uri in (inv, path.dirname(inv), path.dirname(inv) + '/'):
-                    uri = path.dirname(newinv)
+                if target_uri in {
+                    inv_location,
+                    path.dirname(inv_location),
+                    path.dirname(inv_location) + '/'
+                }:
+                    target_uri = path.dirname(new_inv_location)
         with f:
             try:
-                invdata = InventoryFile.load(f, uri, posixpath.join)
+                invdata = InventoryFile.load(f, target_uri, posixpath.join)
             except ValueError as exc:
                 raise ValueError('unknown or unsupported inventory version: %r' % exc) from exc
     except Exception as err:
         err.args = ('intersphinx inventory %r not readable due to %s: %s',
-                    inv, err.__class__.__name__, str(err))
+                    inv_location, err.__class__.__name__, str(err))
         raise
     else:
         return invdata
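Note: fetch_inventory keeps its public positional signature and now simply delegates to the keyword-only _fetch_inventory, so external callers are unaffected. A rough sketch of calling the new helper without a full Sphinx application, mirroring the MockConfig approach in the _cli hunk above (the first two MockConfig attributes and the objects.inv path are assumptions, not shown in this diff):

    from __future__ import annotations

    from sphinx.ext.intersphinx._load import _fetch_inventory

    class MockConfig:
        # assumed to match the MockConfig used by _cli.py; only tls_cacerts and
        # user_agent are visible in this diff, the other two fields are assumed
        intersphinx_timeout: int | None = None
        tls_verify = False
        tls_cacerts: str | dict[str, str] | None = None
        user_agent: str = ''

    inv_data = _fetch_inventory(
        target_uri='',
        inv_location='objects.inv',  # hypothetical local inventory file
        config=MockConfig(),  # type: ignore[arg-type]
        srcdir='',  # type: ignore[arg-type]
    )
    print(sorted(inv_data))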
@@ -2,11 +2,12 @@

 from __future__ import annotations

-from typing import TYPE_CHECKING, Final
+from typing import TYPE_CHECKING, Any, Final, NoReturn

 from sphinx.util import logging

 if TYPE_CHECKING:
+    from collections.abc import Sequence
     from typing import TypeAlias

     from sphinx.environment import BuildEnvironment
@@ -31,7 +32,7 @@ if TYPE_CHECKING:
     #: Inventory cache entry. The integer field is the cache expiration time.
     InventoryCacheEntry: TypeAlias = tuple[InventoryName, int, Inventory]

-    #: The type of :confval:`intersphinx_mapping` *after* normalization.
+    #: The type of :confval:`intersphinx_mapping` *after* normalisation.
     IntersphinxMapping = dict[
         InventoryName,
         tuple[InventoryName, tuple[InventoryURI, tuple[InventoryLocation, ...]]],
@@ -40,6 +41,74 @@ if TYPE_CHECKING:
 LOGGER: Final[logging.SphinxLoggerAdapter] = logging.getLogger('sphinx.ext.intersphinx')


+class _IntersphinxProject:
+    name: InventoryName
+    target_uri: InventoryURI
+    locations: tuple[InventoryLocation, ...]
+
+    __slots__ = {
+        'name': 'The inventory name. '
+                'It is unique and in bijection with an remote inventory URL.',
+        'target_uri': 'The inventory project URL to which links are resolved. '
+                      'It is unique and in bijection with an inventory name.',
+        'locations': 'A tuple of local or remote targets containing '
+                     'the inventory data to fetch. '
+                     'None indicates the default inventory file name.',
+    }
+
+    def __init__(
+        self,
+        *,
+        name: InventoryName,
+        target_uri: InventoryURI,
+        locations: Sequence[InventoryLocation],
+    ) -> None:
+        if not name or not isinstance(name, str):
+            msg = 'name must be a non-empty string'
+            raise ValueError(msg)
+        if not target_uri or not isinstance(target_uri, str):
+            msg = 'target_uri must be a non-empty string'
+            raise ValueError(msg)
+        if not locations or not isinstance(locations, tuple):
+            msg = 'locations must be a non-empty tuple'
+            raise ValueError(msg)
+        if any(
+            location is not None and (not location or not isinstance(location, str))
+            for location in locations
+        ):
+            msg = 'locations must be a tuple of strings or None'
+            raise ValueError(msg)
+        object.__setattr__(self, 'name', name)
+        object.__setattr__(self, 'target_uri', target_uri)
+        object.__setattr__(self, 'locations', tuple(locations))
+
+    def __repr__(self) -> str:
+        return (f'{self.__class__.__name__}('
+                f'name={self.name!r}, '
+                f'target_uri={self.target_uri!r}, '
+                f'locations={self.locations!r})')
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _IntersphinxProject):
+            return NotImplemented
+        return (
+            self.name == other.name
+            and self.target_uri == other.target_uri
+            and self.locations == other.locations
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.name, self.target_uri, self.locations))
+
+    def __setattr__(self, key: str, value: Any) -> NoReturn:
+        msg = f'{self.__class__.__name__} is immutable'
+        raise AttributeError(msg)
+
+    def __delattr__(self, key: str) -> NoReturn:
+        msg = f'{self.__class__.__name__} is immutable'
+        raise AttributeError(msg)
+
+
 class InventoryAdapter:
     """Inventory adapter for environment"""

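Note: the new _IntersphinxProject container validates its inputs and is deliberately immutable (slotted attributes plus overridden __setattr__/__delattr__), which also makes instances hashable and safe to share across the fetching threads. A short sketch of that behaviour (the values are illustrative and assume a Sphinx checkout that contains this commit):

    from sphinx.ext.intersphinx._shared import _IntersphinxProject

    project = _IntersphinxProject(
        name='python',
        target_uri='https://docs.python.org/3',
        locations=(None,),
    )

    try:
        project.name = 'renamed'   # blocked: the class overrides __setattr__
    except AttributeError as exc:
        print(exc)                 # "_IntersphinxProject is immutable"

    try:
        _IntersphinxProject(name='', target_uri='x', locations=(None,))
    except ValueError as exc:
        print(exc)                 # "name must be a non-empty string"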
@@ -13,14 +13,13 @@ from sphinx import addnodes
 from sphinx.builders.html import INVENTORY_FILENAME
 from sphinx.errors import ConfigError
 from sphinx.ext.intersphinx import (
-    fetch_inventory,
     inspect_main,
     load_mappings,
     missing_reference,
     validate_intersphinx_mapping,
 )
 from sphinx.ext.intersphinx import setup as intersphinx_setup
-from sphinx.ext.intersphinx._load import _get_safe_url, _strip_basic_auth
+from sphinx.ext.intersphinx._load import _fetch_inventory, _get_safe_url, _strip_basic_auth
 from sphinx.util.console import strip_colors

 from tests.test_util.intersphinx_data import (
@@ -70,7 +69,12 @@ def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app):  # NoQ

     # same uri and inv, not redirected
     _read_from_url().url = 'https://hostname/' + INVENTORY_FILENAME
-    fetch_inventory(app, 'https://hostname/', 'https://hostname/' + INVENTORY_FILENAME)
+    _fetch_inventory(
+        target_uri='https://hostname/',
+        inv_location='https://hostname/' + INVENTORY_FILENAME,
+        config=app.config,
+        srcdir=app.srcdir,
+    )
     assert 'intersphinx inventory has moved' not in app.status.getvalue()
     assert InventoryFile.load.call_args[0][1] == 'https://hostname/'

@@ -79,7 +83,12 @@ def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app):  # NoQ
     app.status.truncate(0)
     _read_from_url().url = 'https://hostname/new/' + INVENTORY_FILENAME

-    fetch_inventory(app, 'https://hostname/', 'https://hostname/' + INVENTORY_FILENAME)
+    _fetch_inventory(
+        target_uri='https://hostname/',
+        inv_location='https://hostname/' + INVENTORY_FILENAME,
+        config=app.config,
+        srcdir=app.srcdir,
+    )
     assert app.status.getvalue() == ('intersphinx inventory has moved: '
                                      'https://hostname/%s -> https://hostname/new/%s\n' %
                                      (INVENTORY_FILENAME, INVENTORY_FILENAME))
@@ -90,7 +99,12 @@ def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app):  # NoQ
     app.status.truncate(0)
     _read_from_url().url = 'https://hostname/new/' + INVENTORY_FILENAME

-    fetch_inventory(app, 'https://hostname/', 'https://hostname/new/' + INVENTORY_FILENAME)
+    _fetch_inventory(
+        target_uri='https://hostname/',
+        inv_location='https://hostname/new/' + INVENTORY_FILENAME,
+        config=app.config,
+        srcdir=app.srcdir,
+    )
     assert 'intersphinx inventory has moved' not in app.status.getvalue()
     assert InventoryFile.load.call_args[0][1] == 'https://hostname/'

@@ -99,7 +113,12 @@ def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app):  # NoQ
     app.status.truncate(0)
     _read_from_url().url = 'https://hostname/other/' + INVENTORY_FILENAME

-    fetch_inventory(app, 'https://hostname/', 'https://hostname/new/' + INVENTORY_FILENAME)
+    _fetch_inventory(
+        target_uri='https://hostname/',
+        inv_location='https://hostname/new/' + INVENTORY_FILENAME,
+        config=app.config,
+        srcdir=app.srcdir,
+    )
     assert app.status.getvalue() == ('intersphinx inventory has moved: '
                                      'https://hostname/new/%s -> https://hostname/other/%s\n' %
                                      (INVENTORY_FILENAME, INVENTORY_FILENAME))