mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Start using `pathlib.Path` and deprecate `sphinx.testing.path` (#11526)

This commit is contained in:
parent 23c7fdde75
commit 49d8304670

CHANGES
@@ -14,10 +14,15 @@ Deprecated

* #11512: Deprecate ``sphinx.util.md5`` and ``sphinx.util.sha1``.
  Use ``hashlib`` instead.
* #11526: Deprecate ``sphinx.testing.path``.
  Use ``os.path`` or ``pathlib`` instead.

Features added
--------------

* #11526: Support ``os.PathLike`` types and ``pathlib.Path`` objects
  in many more places.

Bugs fixed
----------
@@ -22,6 +22,11 @@ The following is a list of deprecated interfaces.
     - Removed
     - Alternatives

   * - ``sphinx.testing.path``
     - 7.2
     - 9.0
     - ``os.path`` or ``pathlib``

   * - ``sphinx.util.md5``
     - 7.2
     - 9.0
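The table above gives the replacements; for third-party code that still imports the deprecated helper, the mapping onto ``pathlib`` is mostly mechanical. A minimal sketch, assuming a script that previously used ``sphinx.testing.path.path`` (the directory name is illustrative, not from the commit):

    from pathlib import Path

    # Before (deprecated, importing sphinx.testing.path now warns RemovedInSphinx90Warning):
    #   from sphinx.testing.path import path
    #   srcdir = path('/tmp/docs').abspath()
    #   (srcdir / '_build').makedirs(exist_ok=True)

    # After, standard library only:
    srcdir = Path('/tmp/docs').resolve()                      # path(...).abspath() -> Path(...).resolve()
    (srcdir / '_build').mkdir(parents=True, exist_ok=True)    # makedirs() -> mkdir(parents=True, exist_ok=True)
    print((srcdir / 'conf.py').is_file())                     # isfile() -> is_file()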
@@ -11,6 +11,7 @@ import sys
from collections import deque
from io import StringIO
from os import path
from pathlib import Path
from typing import IO, TYPE_CHECKING, Any, Callable

from docutils import nodes

@@ -41,7 +42,7 @@ from sphinx.util.console import bold # type: ignore
from sphinx.util.display import progress_message
from sphinx.util.i18n import CatalogRepository
from sphinx.util.logging import prefixed_warnings
from sphinx.util.osutil import abspath, ensuredir, relpath
from sphinx.util.osutil import ensuredir, relpath
from sphinx.util.tags import Tags
from sphinx.util.typing import RoleFunction, TitleGetter

@@ -132,7 +133,8 @@ class Sphinx:
warningiserror: bool
_warncount: int

def __init__(self, srcdir: str, confdir: str | None, outdir: str, doctreedir: str,
def __init__(self, srcdir: str | os.PathLike[str], confdir: str | os.PathLike[str] | None,
outdir: str | os.PathLike[str], doctreedir: str | os.PathLike[str],
buildername: str, confoverrides: dict | None = None,
status: IO | None = sys.stdout, warning: IO | None = sys.stderr,
freshenv: bool = False, warningiserror: bool = False,

@@ -145,9 +147,9 @@ class Sphinx:
self.registry = SphinxComponentRegistry()

# validate provided directories
self.srcdir = abspath(srcdir)
self.outdir = abspath(outdir)
self.doctreedir = abspath(doctreedir)
self.srcdir = Path(srcdir).resolve()
self.outdir = Path(outdir).resolve()
self.doctreedir = Path(doctreedir).resolve()

if not path.isdir(self.srcdir):
raise ApplicationError(__('Cannot find source directory (%s)') %

@@ -203,7 +205,7 @@ class Sphinx:
self.confdir = self.srcdir
self.config = Config({}, confoverrides or {})
else:
self.confdir = abspath(confdir)
self.confdir = Path(confdir).resolve()
self.config = Config.read(self.confdir, confoverrides or {}, self.tags)

# initialize some limited config variables before initialize i18n and loading
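Since the constructor now accepts ``os.PathLike`` values and normalises them with ``Path(...).resolve()``, callers can pass ``pathlib.Path`` objects directly. A minimal usage sketch, assuming an existing project with ``conf.py`` under ``docs/`` (the layout is illustrative, not from the commit):

    from pathlib import Path

    from sphinx.application import Sphinx

    docs = Path('docs').resolve()                 # assumed layout: docs/conf.py, docs/index.rst
    app = Sphinx(
        srcdir=docs,                              # str or os.PathLike[str] are both accepted now
        confdir=docs,
        outdir=docs / '_build' / 'html',
        doctreedir=docs / '_build' / 'doctrees',
        buildername='html',
    )
    app.build()
    # after this change, app.srcdir / app.outdir / app.doctreedir are pathlib.Path objects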
@@ -262,7 +262,7 @@ Builder
filename)
continue

if not filename.startswith(self.srcdir):
if not filename.startswith(str(self.srcdir)):
logger.warning(__('file %r given on command line is not under the '
'source directory, ignoring'), filename)
continue

@@ -21,7 +21,7 @@ from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.display import status_iterator
from sphinx.util.fileutil import copy_asset_file
from sphinx.util.osutil import copyfile, ensuredir
from sphinx.util.osutil import copyfile, ensuredir, relpath

try:
from PIL import Image

@@ -508,9 +508,6 @@ class EpubBuilder(StandaloneHTMLBuilder):
metadata = self.content_metadata()

# files
if not self.outdir.endswith(os.sep):
self.outdir += os.sep
olen = len(self.outdir)
self.files: list[str] = []
self.ignored_files = ['.buildinfo', 'mimetype', 'content.opf',
'toc.ncx', 'META-INF/container.xml',

@@ -522,7 +519,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
for root, dirs, files in os.walk(self.outdir):
dirs.sort()
for fn in sorted(files):
filename = path.join(root, fn)[olen:]
filename = relpath(path.join(root, fn), self.outdir)
if filename in self.ignored_files:
continue
ext = path.splitext(filename)[-1]

@@ -34,7 +34,7 @@ class ChangesBuilder(Builder):
self.templates.init(self, self.theme)

def get_outdated_docs(self) -> str:
return self.outdir
return str(self.outdir)

typemap = {
'versionadded': 'added',

@@ -28,6 +28,7 @@ from sphinx.util.tags import Tags
from sphinx.util.template import SphinxRenderer

if TYPE_CHECKING:
import os
from collections.abc import Generator, Iterable

logger = logging.getLogger(__name__)

@@ -84,11 +85,12 @@ class MsgOrigin:

class GettextRenderer(SphinxRenderer):
def __init__(
self, template_path: str | None = None, outdir: str | None = None,
self, template_path: list[str | os.PathLike[str]] | None = None,
outdir: str | os.PathLike[str] | None = None,
) -> None:
self.outdir = outdir
if template_path is None:
template_path = path.join(package_dir, 'templates', 'gettext')
template_path = [path.join(package_dir, 'templates', 'gettext')]
super().__init__(template_path)

def escape(s: str) -> str:

@@ -786,7 +786,7 @@ class StandaloneHTMLBuilder(Builder):

def copy_download_files(self) -> None:
def to_relpath(f: str) -> str:
return relative_path(self.srcdir, f)
return relative_path(self.srcdir, f) # type: ignore[arg-type]

# copy downloadable files
if self.env.dlfiles:

@@ -1254,9 +1254,9 @@ def setup_js_tag_helper(app: Sphinx, pagename: str, templatename: str,
context['js_tag'] = js_tag


def _file_checksum(outdir: str, filename: str) -> str:
def _file_checksum(outdir: str | os.PathLike[str], filename: str | os.PathLike[str]) -> str:
# Don't generate checksums for HTTP URIs
if '://' in filename:
if '://' in str(filename):
return ''
try:
# Ensure universal newline mode is used to avoid checksum differences

@@ -1305,7 +1305,7 @@ def validate_html_extra_path(app: Sphinx, config: Config) -> None:
logger.warning(__('html_extra_path entry %r does not exist'), entry)
config.html_extra_path.remove(entry)
elif (path.splitdrive(app.outdir)[0] == path.splitdrive(extra_path)[0] and
path.commonpath([app.outdir, extra_path]) == app.outdir):
path.commonpath((app.outdir, extra_path)) == path.normpath(app.outdir)):
logger.warning(__('html_extra_path entry %r is placed inside outdir'), entry)
config.html_extra_path.remove(entry)

@@ -1318,7 +1318,7 @@ def validate_html_static_path(app: Sphinx, config: Config) -> None:
logger.warning(__('html_static_path entry %r does not exist'), entry)
config.html_static_path.remove(entry)
elif (path.splitdrive(app.outdir)[0] == path.splitdrive(static_path)[0] and
path.commonpath([app.outdir, static_path]) == app.outdir):
path.commonpath((app.outdir, static_path)) == path.normpath(app.outdir)):
logger.warning(__('html_static_path entry %r is placed inside outdir'), entry)
config.html_static_path.remove(entry)

@@ -24,7 +24,7 @@ from sphinx.util import Tee
from sphinx.util.console import color_terminal, nocolor, red, terminal_safe # type: ignore
from sphinx.util.docutils import docutils_namespace, patch_docutils
from sphinx.util.exceptions import format_exception_cut_frames, save_traceback
from sphinx.util.osutil import abspath, ensuredir
from sphinx.util.osutil import ensuredir


def handle_exception(

@@ -234,7 +234,7 @@ def _parse_arguments(argv: list[str] = sys.argv[1:]) -> argparse.Namespace:

if warning and args.warnfile:
try:
warnfile = abspath(args.warnfile)
warnfile = path.abspath(args.warnfile)
ensuredir(path.dirname(warnfile))
warnfp = open(args.warnfile, 'w', encoding="utf-8")
except Exception as exc:

@@ -21,6 +21,7 @@ except ImportError:
from sphinx.util.osutil import _chdir as chdir

if TYPE_CHECKING:
import os
from collections.abc import Generator, Iterator, Sequence

from sphinx.application import Sphinx

@@ -168,9 +169,8 @@ class Config:
self.extensions: list[str] = config.get('extensions', [])

@classmethod
def read(
cls, confdir: str, overrides: dict | None = None, tags: Tags | None = None,
) -> Config:
def read(cls, confdir: str | os.PathLike[str], overrides: dict | None = None,
tags: Tags | None = None) -> Config:
"""Create a Config object from configuration file."""
filename = path.join(confdir, CONFIG_FILENAME)
if not path.isfile(filename):

@@ -244,7 +244,7 @@ class LiteralIncludeReader:
new_lines = self.read_file(self.filename)
old_filename = self.options['diff']
old_lines = self.read_file(old_filename)
diff = unified_diff(old_lines, new_lines, old_filename, self.filename)
diff = unified_diff(old_lines, new_lines, str(old_filename), str(self.filename))
return list(diff)

def pyobject_filter(

@@ -31,6 +31,7 @@ from sphinx.util.osutil import canon_path, os_path

if TYPE_CHECKING:
from collections.abc import Generator, Iterator
from pathlib import Path

from sphinx.application import Sphinx
from sphinx.builders import Builder

@@ -147,8 +148,8 @@ class BuildEnvironment:

def __init__(self, app: Sphinx):
self.app: Sphinx = app
self.doctreedir: str = app.doctreedir
self.srcdir: str = app.srcdir
self.doctreedir: Path = app.doctreedir
self.srcdir: Path = app.srcdir
self.config: Config = None # type: ignore[assignment]
self.config_status: int = CONFIG_UNSET
self.config_status_extra: str = ''

@@ -387,7 +388,7 @@ class BuildEnvironment:
domain.merge_domaindata(docnames, other.domaindata[domainname])
self.events.emit('env-merge-info', self, docnames, other)

def path2doc(self, filename: str) -> str | None:
def path2doc(self, filename: str | os.PathLike[str]) -> str | None:
"""Return the docname for the filename if the file is document.

*filename* should be absolute or relative to the source directory.

@@ -424,8 +424,10 @@ def _get_modules(
return public, items


def generate_autosummary_docs(sources: list[str], output_dir: str | None = None,
suffix: str = '.rst', base_path: str | None = None,
def generate_autosummary_docs(sources: list[str],
output_dir: str | os.PathLike[str] | None = None,
suffix: str = '.rst',
base_path: str | os.PathLike[str] | None = None,
imported_members: bool = False, app: Any = None,
overwrite: bool = True, encoding: str = 'utf-8') -> None:
showed_sources = sorted(sources)

@@ -10,7 +10,7 @@ import tempfile
from hashlib import sha1
from os import path
from subprocess import CalledProcessError
from typing import Any
from typing import TYPE_CHECKING, Any

from docutils import nodes
from docutils.nodes import Element

@@ -29,6 +29,9 @@ from sphinx.util.png import read_png_depth, write_png_depth
from sphinx.util.template import LaTeXRenderer
from sphinx.writers.html import HTML5Translator

if TYPE_CHECKING:
import os

logger = logging.getLogger(__name__)

templates_path = path.join(package_dir, 'templates', 'imgmath')

@@ -83,7 +86,7 @@ def write_svg_depth(filename: str, depth: int) -> None:
def generate_latex_macro(image_format: str,
math: str,
config: Config,
confdir: str = '') -> str:
confdir: str | os.PathLike[str] = '') -> str:
"""Generate LaTeX macro."""
variables = {
'fontsize': config.imgmath_font_size,

@@ -21,7 +21,7 @@ EXCLUDE_PATHS = ['**/_sources', '.#*', '**/.#*', '*.lproj/**']
class Project:
"""A project is the source code set of the Sphinx document(s)."""

def __init__(self, srcdir: str, source_suffix: dict[str, str]) -> None:
def __init__(self, srcdir: str | os.PathLike[str], source_suffix: dict[str, str]) -> None:
#: Source directory.
self.srcdir = srcdir

@@ -61,15 +61,15 @@ class Project:

return self.docnames

def path2doc(self, filename: str) -> str | None:
def path2doc(self, filename: str | os.PathLike[str]) -> str | None:
"""Return the docname for the filename if the file is a document.

*filename* should be absolute or relative to the source directory.
"""
if filename.startswith(self.srcdir):
if str(filename).startswith(str(self.srcdir)):
filename = relpath(filename, self.srcdir)
for suffix in self.source_suffix:
if filename.endswith(suffix):
if str(filename).endswith(suffix):
filename = path_stabilize(filename)
return filename[:-len(suffix)]

@@ -1,97 +0,0 @@
"""Sphinx test comparer for pytest"""
from __future__ import annotations

import difflib
import pathlib
from typing import Any


class PathComparer:
"""
OS-independent path comparison.

Windows path sep and posix path sep:

>>> '\\to\\index' == PathComparer('/to/index')
True
>>> '\\to\\index' == PathComparer('/to/index2')
False

Windows path with drive letters

>>> 'C:\\to\\index' == PathComparer('/to/index')
True
>>> 'C:\\to\\index' == PathComparer('C:/to/index')
True
>>> 'C:\\to\\index' == PathComparer('D:/to/index')
False
"""
def __init__(self, path: str | pathlib.Path):
"""
:param str path: path string, it will be cast as pathlib.Path.
"""
self.path = pathlib.Path(path)

def __str__(self) -> str:
return self.path.as_posix()

def __repr__(self) -> str:
return f"<{self.__class__.__name__}: '{self}'>"

def __eq__(self, other: str | pathlib.Path) -> bool: # type: ignore
return not bool(self.ldiff(other))

def diff(self, other: str | pathlib.Path) -> list[str]:
"""compare self and other.

When different is not exist, return empty list.

>>> PathComparer('/to/index').diff('C:\\to\\index')
[]

When different is exist, return unified diff style list as:

>>> PathComparer('/to/index').diff('C:\\to\\index2')
[
'- C:/to/index'
'+ C:/to/index2'
'? +'
]
"""
return self.ldiff(other)

def ldiff(self, other: str | pathlib.Path) -> list[str]:
return self._diff(
self.path,
pathlib.Path(other),
)

def rdiff(self, other: str | pathlib.Path) -> list[str]:
return self._diff(
pathlib.Path(other),
self.path,
)

def _diff(self, lhs: pathlib.Path, rhs: pathlib.Path) -> list[str]:
if lhs == rhs:
return []

if lhs.drive or rhs.drive:
# If either has a drive letter compare by absolute path
s_path, o_path = lhs.absolute().as_posix(), rhs.absolute().as_posix()
else:
s_path, o_path = lhs.as_posix(), rhs.as_posix()

if s_path == o_path:
return []

return [line.strip() for line in difflib.Differ().compare([s_path], [o_path])]


def pytest_assertrepr_compare(op: str, left: Any, right: Any) -> list[str]:
if isinstance(left, PathComparer) and op == "==":
return ['Comparing path:'] + left.ldiff(right)
elif isinstance(right, PathComparer) and op == "==":
return ['Comparing path:'] + right.rdiff(left)
else:
return []

@@ -2,6 +2,7 @@

from __future__ import annotations

import shutil
import subprocess
import sys
from collections import namedtuple

@@ -10,11 +11,11 @@ from typing import TYPE_CHECKING, Any, Callable

import pytest

from sphinx.testing import util
from sphinx.testing.util import SphinxTestApp, SphinxTestAppWrapperForSkipBuilding

if TYPE_CHECKING:
from collections.abc import Generator
from pathlib import Path

DEFAULT_ENABLED_MARKERS = [
(

@@ -96,7 +97,7 @@ def app_params(request: Any, test_params: dict, shared_result: SharedResult,
# special support for sphinx/tests
if rootdir and not srcdir.exists():
testroot_path = rootdir / ('test-' + testroot)
testroot_path.copytree(srcdir)
shutil.copytree(testroot_path, srcdir)

return namedtuple('app_params', 'args,kwargs')(args, kwargs) # type: ignore

@@ -169,8 +170,6 @@ def make_app(test_params: dict, monkeypatch: Any) -> Generator[Callable, None, N
if you want to initialize 'app' in your test function. please use this
instead of using SphinxTestApp class directory.
"""
monkeypatch.setattr('sphinx.application.abspath', lambda x: x)

apps = []
syspath = sys.path[:]

@@ -218,21 +217,9 @@ def if_graphviz_found(app: SphinxTestApp) -> None: # NoQA: PT004


@pytest.fixture(scope='session')
def sphinx_test_tempdir(tmpdir_factory: Any) -> util.path:
"""
Temporary directory wrapped with `path` class.
"""
tmpdir = tmpdir_factory.getbasetemp()
return util.path(tmpdir).abspath()


@pytest.fixture()
def tempdir(tmpdir: str) -> util.path:
"""
Temporary directory wrapped with `path` class.
This fixture is for back-compatibility with old test implementation.
"""
return util.path(tmpdir)
def sphinx_test_tempdir(tmp_path_factory: Any) -> Path:
"""Temporary directory."""
return tmp_path_factory.getbasetemp()


@pytest.fixture()
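With the wrapper class gone, the test fixtures now hand back plain ``pathlib.Path`` objects, and tests use pytest's built-in ``tmp_path``/``tmp_path_factory`` rather than the removed ``tempdir`` fixture. A minimal sketch of the new pattern, assuming a trivial throwaway project (file contents are illustrative, not from the commit):

    from pathlib import Path


    def test_minimal_build(make_app, tmp_path: Path) -> None:
        # pytest's tmp_path is already a pathlib.Path, so no wrapping is needed
        (tmp_path / 'conf.py').write_text('', encoding='utf-8')
        (tmp_path / 'index.rst').write_text('Title\n=====\n', encoding='utf-8')

        app = make_app('html', srcdir=tmp_path)   # srcdir may be any os.PathLike
        app.build()

        # outdir is a Path as well, so pathlib methods work directly
        assert (app.outdir / 'index.html').is_file()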
@ -3,11 +3,18 @@ from __future__ import annotations
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import warnings
|
||||
from typing import IO, TYPE_CHECKING, Any, Callable
|
||||
|
||||
from sphinx.deprecation import RemovedInSphinx90Warning
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import builtins
|
||||
|
||||
warnings.warn("'sphinx.testing.path' is deprecated. "
|
||||
"Use 'os.path' or 'pathlib' instead.",
|
||||
RemovedInSphinx90Warning, stacklevel=2)
|
||||
|
||||
FILESYSTEMENCODING = sys.getfilesystemencoding() or sys.getdefaultencoding()
|
||||
|
||||
|
||||
|
@ -15,12 +15,10 @@ from docutils.parsers.rst import directives, roles
|
||||
|
||||
from sphinx import application, locale
|
||||
from sphinx.pycode import ModuleAnalyzer
|
||||
from sphinx.testing.path import path
|
||||
from sphinx.util.osutil import relpath
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Generator
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
|
||||
__all__ = [
|
||||
'Struct', 'SphinxTestApp', 'SphinxTestAppWrapperForSkipBuilding',
|
||||
@ -102,8 +100,8 @@ class SphinxTestApp(application.Sphinx):
|
||||
def __init__(
|
||||
self,
|
||||
buildername: str = 'html',
|
||||
srcdir: path | None = None,
|
||||
builddir: path | None = None,
|
||||
srcdir: Path | None = None,
|
||||
builddir: Path | None = None,
|
||||
freshenv: bool = False,
|
||||
confoverrides: dict | None = None,
|
||||
status: IO | None = None,
|
||||
@ -123,9 +121,9 @@ class SphinxTestApp(application.Sphinx):
|
||||
|
||||
confdir = srcdir
|
||||
outdir = builddir.joinpath(buildername)
|
||||
outdir.makedirs(exist_ok=True)
|
||||
outdir.mkdir(parents=True, exist_ok=True)
|
||||
doctreedir = builddir.joinpath('doctrees')
|
||||
doctreedir.makedirs(exist_ok=True)
|
||||
doctreedir.mkdir(parents=True, exist_ok=True)
|
||||
if confoverrides is None:
|
||||
confoverrides = {}
|
||||
warningiserror = False
|
||||
@ -184,7 +182,7 @@ class SphinxTestAppWrapperForSkipBuilding:
|
||||
return getattr(self.app, name)
|
||||
|
||||
def build(self, *args: Any, **kwargs: Any) -> None:
|
||||
if not self.app.outdir.listdir(): # type: ignore
|
||||
if not os.listdir(self.app.outdir):
|
||||
# if listdir is empty, do build.
|
||||
self.app.build(*args, **kwargs)
|
||||
# otherwise, we can use built cache
|
||||
@ -193,14 +191,6 @@ class SphinxTestAppWrapperForSkipBuilding:
|
||||
_unicode_literals_re = re.compile(r'u(".*?")|u(\'.*?\')')
|
||||
|
||||
|
||||
def find_files(root: str, suffix: str | None = None) -> Generator[str, None, None]:
|
||||
for dirpath, _dirs, files in os.walk(root, followlinks=True):
|
||||
dirpath = path(dirpath)
|
||||
for f in [f for f in files if not suffix or f.endswith(suffix)]:
|
||||
fpath = dirpath / f
|
||||
yield relpath(fpath, root)
|
||||
|
||||
|
||||
def strip_escseq(text: str) -> str:
|
||||
return re.sub('\x1b.*?m', '', text)
|
||||
|
||||
|
@ -15,7 +15,7 @@ if TYPE_CHECKING:
|
||||
from sphinx.util.template import BaseRenderer
|
||||
|
||||
|
||||
def copy_asset_file(source: str, destination: str,
|
||||
def copy_asset_file(source: str | os.PathLike[str], destination: str | os.PathLike[str],
|
||||
context: dict | None = None,
|
||||
renderer: BaseRenderer | None = None) -> None:
|
||||
"""Copy an asset file to destination.
|
||||
@ -34,8 +34,10 @@ def copy_asset_file(source: str, destination: str,
|
||||
if os.path.isdir(destination):
|
||||
# Use source filename if destination points a directory
|
||||
destination = os.path.join(destination, os.path.basename(source))
|
||||
else:
|
||||
destination = str(destination)
|
||||
|
||||
if source.lower().endswith('_t') and context is not None:
|
||||
if os.path.splitext(source)[1].lower().endswith('_t') and context is not None:
|
||||
if renderer is None:
|
||||
from sphinx.util.template import SphinxRenderer
|
||||
renderer = SphinxRenderer()
|
||||
@ -49,7 +51,8 @@ def copy_asset_file(source: str, destination: str,
|
||||
copyfile(source, destination)
|
||||
|
||||
|
||||
def copy_asset(source: str, destination: str, excluded: PathMatcher = lambda path: False,
|
||||
def copy_asset(source: str | os.PathLike[str], destination: str | os.PathLike[str],
|
||||
excluded: PathMatcher = lambda path: False,
|
||||
context: dict | None = None, renderer: BaseRenderer | None = None,
|
||||
onerror: Callable[[str, Exception], None] | None = None) -> None:
|
||||
"""Copy asset files to destination recursively.
|
||||
@ -77,7 +80,7 @@ def copy_asset(source: str, destination: str, excluded: PathMatcher = lambda pat
|
||||
return
|
||||
|
||||
for root, dirs, files in os.walk(source, followlinks=True):
|
||||
reldir = relative_path(source, root)
|
||||
reldir = relative_path(source, root) # type: ignore[arg-type]
|
||||
for dir in dirs[:]:
|
||||
if excluded(posixpath.join(reldir, dir)):
|
||||
dirs.remove(dir)
|
||||
|
@ -73,7 +73,7 @@ class CatalogInfo(LocaleFileInfoBase):
|
||||
class CatalogRepository:
|
||||
"""A repository for message catalogs."""
|
||||
|
||||
def __init__(self, basedir: str, locale_dirs: list[str],
|
||||
def __init__(self, basedir: str | os.PathLike[str], locale_dirs: list[str],
|
||||
language: str, encoding: str) -> None:
|
||||
self.basedir = basedir
|
||||
self._locale_dirs = locale_dirs
|
||||
|
@ -110,7 +110,7 @@ def patfilter(names: Iterable[str], pat: str) -> list[str]:
|
||||
|
||||
|
||||
def get_matching_files(
|
||||
dirname: str,
|
||||
dirname: str | os.PathLike[str],
|
||||
include_patterns: Iterable[str] = ("**",),
|
||||
exclude_patterns: Iterable[str] = (),
|
||||
) -> Iterator[str]:
|
||||
|
@ -18,13 +18,6 @@ from sphinx.deprecation import _deprecation_warning
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterator
|
||||
|
||||
try:
|
||||
# for ALT Linux (#6712)
|
||||
from sphinx.testing.path import path as Path
|
||||
except ImportError:
|
||||
Path = None # type: ignore
|
||||
|
||||
|
||||
# SEP separates path elements in the canonical file names
|
||||
#
|
||||
# Define SEP as a manifest constant, not so much because we expect it to change
|
||||
@ -37,12 +30,12 @@ def os_path(canonicalpath: str) -> str:
|
||||
return canonicalpath.replace(SEP, path.sep)
|
||||
|
||||
|
||||
def canon_path(nativepath: str) -> str:
|
||||
def canon_path(nativepath: str | os.PathLike[str]) -> str:
|
||||
"""Return path in OS-independent form"""
|
||||
return nativepath.replace(path.sep, SEP)
|
||||
return str(nativepath).replace(path.sep, SEP)
|
||||
|
||||
|
||||
def path_stabilize(filepath: str) -> str:
|
||||
def path_stabilize(filepath: str | os.PathLike[str]) -> str:
|
||||
"Normalize path separator and unicode string"
|
||||
new_path = canon_path(filepath)
|
||||
return unicodedata.normalize('NFC', new_path)
|
||||
@ -71,9 +64,9 @@ def relative_uri(base: str, to: str) -> str:
|
||||
return ('..' + SEP) * (len(b2) - 1) + SEP.join(t2)
|
||||
|
||||
|
||||
def ensuredir(path: str) -> None:
|
||||
def ensuredir(file: str | os.PathLike[str]) -> None:
|
||||
"""Ensure that a path exists."""
|
||||
os.makedirs(path, exist_ok=True)
|
||||
os.makedirs(file, exist_ok=True)
|
||||
|
||||
|
||||
def mtimes_of_files(dirnames: list[str], suffix: str) -> Iterator[float]:
|
||||
@ -87,14 +80,14 @@ def mtimes_of_files(dirnames: list[str], suffix: str) -> Iterator[float]:
|
||||
pass
|
||||
|
||||
|
||||
def copytimes(source: str, dest: str) -> None:
|
||||
def copytimes(source: str | os.PathLike[str], dest: str | os.PathLike[str]) -> None:
|
||||
"""Copy a file's modification times."""
|
||||
st = os.stat(source)
|
||||
if hasattr(os, 'utime'):
|
||||
os.utime(dest, (st.st_atime, st.st_mtime))
|
||||
|
||||
|
||||
def copyfile(source: str, dest: str) -> None:
|
||||
def copyfile(source: str | os.PathLike[str], dest: str | os.PathLike[str]) -> None:
|
||||
"""Copy a file and its modification times, if possible.
|
||||
|
||||
Note: ``copyfile`` skips copying if the file has not been changed"""
|
||||
@ -119,7 +112,8 @@ def make_filename_from_project(project: str) -> str:
|
||||
return make_filename(project_suffix_re.sub('', project)).lower()
|
||||
|
||||
|
||||
def relpath(path: str, start: str | None = os.curdir) -> str:
|
||||
def relpath(path: str | os.PathLike[str],
|
||||
start: str | os.PathLike[str] | None = os.curdir) -> str:
|
||||
"""Return a relative filepath to *path* either from the current directory or
|
||||
from an optional *start* directory.
|
||||
|
||||
@ -129,26 +123,14 @@ def relpath(path: str, start: str | None = os.curdir) -> str:
|
||||
try:
|
||||
return os.path.relpath(path, start)
|
||||
except ValueError:
|
||||
return path
|
||||
return str(path)
|
||||
|
||||
|
||||
safe_relpath = relpath # for compatibility
|
||||
fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
|
||||
|
||||
|
||||
def abspath(pathdir: str) -> str:
|
||||
if Path is not None and isinstance(pathdir, Path):
|
||||
return pathdir.abspath()
|
||||
else:
|
||||
pathdir = path.abspath(pathdir)
|
||||
if isinstance(pathdir, bytes):
|
||||
try:
|
||||
pathdir = pathdir.decode(fs_encoding)
|
||||
except UnicodeDecodeError as exc:
|
||||
raise UnicodeDecodeError('multibyte filename not supported on '
|
||||
'this filesystem encoding '
|
||||
'(%r)' % fs_encoding) from exc
|
||||
return pathdir
|
||||
abspath = path.abspath
|
||||
|
||||
|
||||
class _chdir:
|
||||
|
@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import os
|
||||
from functools import partial
|
||||
from os import path
|
||||
from typing import Any, Callable
|
||||
from typing import TYPE_CHECKING, Any, Callable
|
||||
|
||||
from jinja2 import TemplateNotFound
|
||||
from jinja2.environment import Environment
|
||||
@ -17,6 +17,9 @@ from sphinx.jinja2glue import SphinxFileSystemLoader
|
||||
from sphinx.locale import get_translator
|
||||
from sphinx.util import rst, texescape
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
|
||||
class BaseRenderer:
|
||||
def __init__(self, loader: BaseLoader | None = None) -> None:
|
||||
@ -32,8 +35,8 @@ class BaseRenderer:
|
||||
|
||||
|
||||
class FileRenderer(BaseRenderer):
|
||||
def __init__(self, search_path: str | list[str]) -> None:
|
||||
if isinstance(search_path, str):
|
||||
def __init__(self, search_path: Sequence[str | os.PathLike[str]]) -> None:
|
||||
if isinstance(search_path, (str, os.PathLike)):
|
||||
search_path = [search_path]
|
||||
else:
|
||||
# filter "None" paths
|
||||
@ -50,7 +53,7 @@ class FileRenderer(BaseRenderer):
|
||||
|
||||
|
||||
class SphinxRenderer(FileRenderer):
|
||||
def __init__(self, template_path: None | str | list[str] = None) -> None:
|
||||
def __init__(self, template_path: Sequence[str | os.PathLike[str]] | None = None) -> None:
|
||||
if template_path is None:
|
||||
template_path = os.path.join(package_dir, 'templates')
|
||||
super().__init__(template_path)
|
||||
@ -61,11 +64,10 @@ class SphinxRenderer(FileRenderer):
|
||||
|
||||
|
||||
class LaTeXRenderer(SphinxRenderer):
|
||||
def __init__(
|
||||
self, template_path: str | None = None, latex_engine: str | None = None,
|
||||
) -> None:
|
||||
def __init__(self, template_path: Sequence[str | os.PathLike[str]] | None = None,
|
||||
latex_engine: str | None = None) -> None:
|
||||
if template_path is None:
|
||||
template_path = os.path.join(package_dir, 'templates', 'latex')
|
||||
template_path = [os.path.join(package_dir, 'templates', 'latex')]
|
||||
super().__init__(template_path)
|
||||
|
||||
# use texescape as escape filter
|
||||
@ -85,9 +87,8 @@ class LaTeXRenderer(SphinxRenderer):
|
||||
|
||||
|
||||
class ReSTRenderer(SphinxRenderer):
|
||||
def __init__(
|
||||
self, template_path: None | str | list[str] = None, language: str | None = None,
|
||||
) -> None:
|
||||
def __init__(self, template_path: Sequence[str | os.PathLike[str]] | None = None,
|
||||
language: str | None = None) -> None:
|
||||
super().__init__(template_path)
|
||||
|
||||
# add language to environment
|
||||
@ -102,8 +103,9 @@ class ReSTRenderer(SphinxRenderer):
|
||||
class SphinxTemplateLoader(BaseLoader):
|
||||
"""A loader supporting template inheritance"""
|
||||
|
||||
def __init__(self, confdir: str, templates_paths: list[str],
|
||||
system_templates_paths: list[str]) -> None:
|
||||
def __init__(self, confdir: str | os.PathLike[str],
|
||||
templates_paths: Sequence[str | os.PathLike[str]],
|
||||
system_templates_paths: Sequence[str | os.PathLike[str]]) -> None:
|
||||
self.loaders = []
|
||||
self.sysloaders = []
|
||||
|
||||
|
@ -1,13 +1,10 @@
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
import docutils
|
||||
import pytest
|
||||
|
||||
import sphinx
|
||||
import sphinx.locale
|
||||
from sphinx.testing import comparer
|
||||
from sphinx.testing.path import path
|
||||
|
||||
|
||||
def _init_console(locale_dir=sphinx.locale._LOCALE_DIR, catalog='sphinx'):
|
||||
@ -30,32 +27,11 @@ collect_ignore = ['roots']
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def rootdir():
|
||||
return path(__file__).parent.abspath() / 'roots'
|
||||
return Path(__file__).parent.absolute() / 'roots'
|
||||
|
||||
|
||||
def pytest_report_header(config):
|
||||
header = ("libraries: Sphinx-%s, docutils-%s" %
|
||||
(sphinx.__display_version__, docutils.__version__))
|
||||
header = f"libraries: Sphinx-{sphinx.__display_version__}, docutils-{docutils.__version__}"
|
||||
if hasattr(config, '_tmp_path_factory'):
|
||||
header += "\nbase tempdir: %s" % config._tmp_path_factory.getbasetemp()
|
||||
|
||||
header += f"\nbase tmp_path: {config._tmp_path_factory.getbasetemp()}"
|
||||
return header
|
||||
|
||||
|
||||
def pytest_assertrepr_compare(op, left, right):
|
||||
comparer.pytest_assertrepr_compare(op, left, right)
|
||||
|
||||
|
||||
def _initialize_test_directory(session):
|
||||
if 'SPHINX_TEST_TEMPDIR' in os.environ:
|
||||
tempdir = os.path.abspath(os.getenv('SPHINX_TEST_TEMPDIR'))
|
||||
print('Temporary files will be placed in %s.' % tempdir)
|
||||
|
||||
if os.path.exists(tempdir):
|
||||
shutil.rmtree(tempdir)
|
||||
|
||||
os.makedirs(tempdir)
|
||||
|
||||
|
||||
def pytest_sessionstart(session):
|
||||
_initialize_test_directory(session)
|
||||
|
@ -11,7 +11,6 @@ from docutils import nodes
|
||||
|
||||
import sphinx.application
|
||||
from sphinx.errors import ExtensionError
|
||||
from sphinx.testing.path import path
|
||||
from sphinx.testing.util import SphinxTestApp, strip_escseq
|
||||
from sphinx.util import logging
|
||||
|
||||
@ -24,13 +23,11 @@ def test_instantiation(tmp_path_factory, rootdir: str, monkeypatch):
|
||||
if rootdir and not src_dir.exists():
|
||||
shutil.copytree(Path(str(rootdir)) / 'test-root', src_dir)
|
||||
|
||||
monkeypatch.setattr('sphinx.application.abspath', lambda x: x)
|
||||
|
||||
syspath = sys.path[:]
|
||||
|
||||
# When
|
||||
app_ = SphinxTestApp(
|
||||
srcdir=path(src_dir),
|
||||
srcdir=src_dir,
|
||||
status=StringIO(),
|
||||
warning=StringIO(),
|
||||
)
|
||||
|
@ -1,5 +1,7 @@
|
||||
"""Test all builders."""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from unittest import mock
|
||||
|
||||
@ -23,7 +25,7 @@ def nonascii_srcdir(request, rootdir, sphinx_test_tempdir):
|
||||
basedir = sphinx_test_tempdir / request.node.originalname
|
||||
srcdir = basedir / test_name
|
||||
if not srcdir.exists():
|
||||
(rootdir / 'test-root').copytree(srcdir)
|
||||
shutil.copytree(rootdir / 'test-root', srcdir)
|
||||
|
||||
# add a doc with a non-ASCII file name to the source dir
|
||||
(srcdir / (test_name + '.txt')).write_text("""
|
||||
@ -54,11 +56,11 @@ def test_build_all(requests_head, make_app, nonascii_srcdir, buildername):
|
||||
app.build()
|
||||
|
||||
|
||||
def test_root_doc_not_found(tempdir, make_app):
|
||||
(tempdir / 'conf.py').write_text('', encoding='utf8')
|
||||
assert tempdir.listdir() == ['conf.py']
|
||||
def test_root_doc_not_found(tmp_path, make_app):
|
||||
(tmp_path / 'conf.py').write_text('', encoding='utf8')
|
||||
assert os.listdir(tmp_path) == ['conf.py']
|
||||
|
||||
app = make_app('dummy', srcdir=tempdir)
|
||||
app = make_app('dummy', srcdir=tmp_path)
|
||||
with pytest.raises(SphinxError):
|
||||
app.builder.build_all() # no index.rst
|
||||
|
||||
|
@ -42,9 +42,9 @@ def test_build_gettext(app):
|
||||
|
||||
# Do messages end up in the correct location?
|
||||
# top-level documents end up in a message catalog
|
||||
assert (app.outdir / 'extapi.pot').isfile()
|
||||
assert (app.outdir / 'extapi.pot').is_file()
|
||||
# directory items are grouped into sections
|
||||
assert (app.outdir / 'subdir.pot').isfile()
|
||||
assert (app.outdir / 'subdir.pot').is_file()
|
||||
|
||||
# regression test for issue #960
|
||||
catalog = (app.outdir / 'markup.pot').read_text(encoding='utf8')
|
||||
@ -54,7 +54,7 @@ def test_build_gettext(app):
|
||||
@pytest.mark.sphinx('gettext', srcdir='root-gettext')
|
||||
def test_msgfmt(app):
|
||||
app.builder.build_all()
|
||||
(app.outdir / 'en' / 'LC_MESSAGES').makedirs()
|
||||
(app.outdir / 'en' / 'LC_MESSAGES').mkdir(parents=True, exist_ok=True)
|
||||
with chdir(app.outdir):
|
||||
try:
|
||||
args = ['msginit', '--no-translator', '-i', 'markup.pot', '--locale', 'en_US']
|
||||
@ -66,7 +66,7 @@ def test_msgfmt(app):
|
||||
print(exc.stderr)
|
||||
raise AssertionError('msginit exited with return code %s' % exc.returncode)
|
||||
|
||||
assert (app.outdir / 'en_US.po').isfile(), 'msginit failed'
|
||||
assert (app.outdir / 'en_US.po').is_file(), 'msginit failed'
|
||||
try:
|
||||
args = ['msgfmt', 'en_US.po',
|
||||
'-o', os.path.join('en', 'LC_MESSAGES', 'test_root.mo')]
|
||||
@ -79,7 +79,7 @@ def test_msgfmt(app):
|
||||
raise AssertionError('msgfmt exited with return code %s' % exc.returncode)
|
||||
|
||||
mo = app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo'
|
||||
assert mo.isfile(), 'msgfmt failed'
|
||||
assert mo.is_file(), 'msgfmt failed'
|
||||
|
||||
_ = gettext.translation('test_root', app.outdir, languages=['en']).gettext
|
||||
assert _("Testing various markup") == "Testing various markup"
|
||||
@ -164,7 +164,7 @@ def test_gettext_disable_index_entries(app):
|
||||
@pytest.mark.sphinx('gettext', testroot='intl', srcdir='gettext')
|
||||
def test_gettext_template(app):
|
||||
app.build()
|
||||
assert (app.outdir / 'sphinx.pot').isfile()
|
||||
assert (app.outdir / 'sphinx.pot').is_file()
|
||||
|
||||
result = (app.outdir / 'sphinx.pot').read_text(encoding='utf8')
|
||||
assert "Welcome" in result
|
||||
@ -174,7 +174,7 @@ def test_gettext_template(app):
|
||||
@pytest.mark.sphinx('gettext', testroot='gettext-template')
|
||||
def test_gettext_template_msgid_order_in_sphinxpot(app):
|
||||
app.builder.build_all()
|
||||
assert (app.outdir / 'sphinx.pot').isfile()
|
||||
assert (app.outdir / 'sphinx.pot').is_file()
|
||||
|
||||
result = (app.outdir / 'sphinx.pot').read_text(encoding='utf8')
|
||||
assert re.search(
|
||||
@ -192,7 +192,7 @@ def test_gettext_template_msgid_order_in_sphinxpot(app):
|
||||
def test_build_single_pot(app):
|
||||
app.builder.build_all()
|
||||
|
||||
assert (app.outdir / 'documentation.pot').isfile()
|
||||
assert (app.outdir / 'documentation.pot').is_file()
|
||||
|
||||
result = (app.outdir / 'documentation.pot').read_text(encoding='utf8')
|
||||
assert re.search(
|
||||
|
@ -116,22 +116,22 @@ def test_html_warnings(app, warning):
|
||||
app.build()
|
||||
html_warnings = strip_escseq(re.sub(re.escape(os.sep) + '{1,2}', '/', warning.getvalue()))
|
||||
html_warnings_exp = HTML_WARNINGS % {
|
||||
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
|
||||
'root': re.escape(app.srcdir.as_posix())}
|
||||
assert re.match(html_warnings_exp + '$', html_warnings), \
|
||||
"Warnings don't match:\n" + \
|
||||
'--- Expected (regex):\n' + html_warnings_exp + \
|
||||
'--- Got:\n' + html_warnings
|
||||
|
||||
|
||||
def test_html4_error(make_app, tempdir):
|
||||
(tempdir / 'conf.py').write_text('', encoding='utf-8')
|
||||
def test_html4_error(make_app, tmp_path):
|
||||
(tmp_path / 'conf.py').write_text('', encoding='utf-8')
|
||||
with pytest.raises(
|
||||
ConfigError,
|
||||
match=r'HTML 4 is no longer supported by Sphinx',
|
||||
):
|
||||
make_app(
|
||||
buildername='html',
|
||||
srcdir=tempdir,
|
||||
srcdir=tmp_path,
|
||||
confoverrides={'html4_writer': True},
|
||||
)
|
||||
|
||||
@ -753,7 +753,7 @@ def test_numfig_without_numbered_toctree(app, cached_etree_parse, fname, expect)
|
||||
index = re.sub(':numbered:.*', '', index)
|
||||
(app.srcdir / 'index.rst').write_text(index, encoding='utf8')
|
||||
|
||||
if not app.outdir.listdir():
|
||||
if not os.listdir(app.outdir):
|
||||
app.build()
|
||||
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||
|
||||
@ -1371,7 +1371,7 @@ def test_html_remote_images(app, status, warning):
|
||||
def test_html_encoded_image(app, status, warning):
|
||||
app.builder.build_all()
|
||||
|
||||
result = (app.outdir / 'index.html').read_text()
|
||||
result = (app.outdir / 'index.html').read_text(encoding='utf8')
|
||||
assert ('<img alt="_images/img_%231.png" src="_images/img_%231.png" />' in result)
|
||||
assert (app.outdir / '_images/img_#1.png').exists()
|
||||
|
||||
@ -1563,7 +1563,7 @@ def test_html_dark_pygments_style_default(app):
|
||||
|
||||
@pytest.mark.sphinx(testroot='basic', srcdir='validate_html_extra_path')
|
||||
def test_validate_html_extra_path(app):
|
||||
(app.confdir / '_static').makedirs()
|
||||
(app.confdir / '_static').mkdir(parents=True, exist_ok=True)
|
||||
app.config.html_extra_path = [
|
||||
'/path/to/not_found', # not found
|
||||
'_static',
|
||||
@ -1576,7 +1576,7 @@ def test_validate_html_extra_path(app):
|
||||
|
||||
@pytest.mark.sphinx(testroot='basic', srcdir='validate_html_static_path')
|
||||
def test_validate_html_static_path(app):
|
||||
(app.confdir / '_static').makedirs()
|
||||
(app.confdir / '_static').mkdir(parents=True, exist_ok=True)
|
||||
app.config.html_static_path = [
|
||||
'/path/to/not_found', # not found
|
||||
'_static',
|
||||
@ -1691,7 +1691,7 @@ def test_html_signaturereturn_icon(app):
|
||||
@pytest.mark.sphinx('html', testroot='reST-code-role')
|
||||
def test_html_code_role(app):
|
||||
app.build()
|
||||
content = (app.outdir / 'index.html').read_text()
|
||||
content = (app.outdir / 'index.html').read_text(encoding='utf8')
|
||||
|
||||
common_content = (
|
||||
'<span class="k">def</span> <span class="nf">foo</span>'
|
||||
@ -1716,7 +1716,7 @@ def test_html_code_role(app):
|
||||
confoverrides={'option_emphasise_placeholders': True})
|
||||
def test_option_emphasise_placeholders(app, status, warning):
|
||||
app.build()
|
||||
content = (app.outdir / 'objects.html').read_text()
|
||||
content = (app.outdir / 'objects.html').read_text(encoding='utf8')
|
||||
assert '<em><span class="pre">TYPE</span></em>' in content
|
||||
assert '{TYPE}' not in content
|
||||
assert ('<em><span class="pre">WHERE</span></em>'
|
||||
@ -1730,7 +1730,7 @@ def test_option_emphasise_placeholders(app, status, warning):
|
||||
@pytest.mark.sphinx('html', testroot='root')
|
||||
def test_option_emphasise_placeholders_default(app, status, warning):
|
||||
app.build()
|
||||
content = (app.outdir / 'objects.html').read_text()
|
||||
content = (app.outdir / 'objects.html').read_text(encoding='utf8')
|
||||
assert '<span class="pre">={TYPE}</span>' in content
|
||||
assert '<span class="pre">={WHERE}-{COUNT}</span></span>' in content
|
||||
assert '<span class="pre">{client_name}</span>' in content
|
||||
@ -1742,7 +1742,7 @@ def test_option_emphasise_placeholders_default(app, status, warning):
|
||||
@pytest.mark.sphinx('html', testroot='root')
|
||||
def test_option_reference_with_value(app, status, warning):
|
||||
app.build()
|
||||
content = (app.outdir / 'objects.html').read_text()
|
||||
content = (app.outdir / 'objects.html').read_text(encoding='utf-8')
|
||||
assert ('<span class="pre">-mapi</span></span><span class="sig-prename descclassname">'
|
||||
'</span><a class="headerlink" href="#cmdoption-git-commit-mapi"') in content
|
||||
assert 'first option <a class="reference internal" href="#cmdoption-git-commit-mapi">' in content
|
||||
|
@ -124,7 +124,7 @@ def test_build_latex_doc(app, status, warning, engine, docclass, python_maximum_
|
||||
app.builder.build_all()
|
||||
|
||||
# file from latex_additional_files
|
||||
assert (app.outdir / 'svgimg.svg').isfile()
|
||||
assert (app.outdir / 'svgimg.svg').is_file()
|
||||
|
||||
compile_latex_document(app, 'sphinxtests.tex', docclass)
|
||||
|
||||
@ -179,7 +179,7 @@ def test_latex_warnings(app, status, warning):
|
||||
|
||||
warnings = strip_escseq(re.sub(re.escape(os.sep) + '{1,2}', '/', warning.getvalue()))
|
||||
warnings_exp = LATEX_WARNINGS % {
|
||||
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
|
||||
'root': re.escape(app.srcdir.as_posix())}
|
||||
assert re.match(warnings_exp + '$', warnings), \
|
||||
"Warnings don't match:\n" + \
|
||||
'--- Expected (regex):\n' + warnings_exp + \
|
||||
@ -1671,7 +1671,7 @@ def test_latex_container(app, status, warning):
|
||||
@pytest.mark.sphinx('latex', testroot='reST-code-role')
|
||||
def test_latex_code_role(app):
|
||||
app.build()
|
||||
content = (app.outdir / 'python.tex').read_text()
|
||||
content = (app.outdir / 'python.tex').read_text(encoding='utf8')
|
||||
|
||||
common_content = (
|
||||
r'\PYG{k}{def} '
|
||||
@ -1715,7 +1715,7 @@ def test_copy_images(app, status, warning):
|
||||
@pytest.mark.sphinx('latex', testroot='latex-labels-before-module')
|
||||
def test_duplicated_labels_before_module(app, status, warning):
|
||||
app.build()
|
||||
content: str = (app.outdir / 'python.tex').read_text()
|
||||
content: str = (app.outdir / 'python.tex').read_text(encoding='utf8')
|
||||
|
||||
def count_label(name):
|
||||
text = r'\phantomsection\label{\detokenize{%s}}' % name
|
||||
|
@ -31,7 +31,7 @@ def test_texinfo_warnings(app, status, warning):
|
||||
app.builder.build_all()
|
||||
warnings = strip_escseq(re.sub(re.escape(os.sep) + '{1,2}', '/', warning.getvalue()))
|
||||
warnings_exp = TEXINFO_WARNINGS % {
|
||||
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
|
||||
'root': re.escape(app.srcdir.as_posix())}
|
||||
assert re.match(warnings_exp + '$', warnings), \
|
||||
"Warnings don't match:\n" + \
|
||||
'--- Expected (regex):\n' + warnings_exp + \
|
||||
|
@ -1,28 +1,28 @@
|
||||
"""Test the base build process."""
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from sphinx.testing.util import find_files
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def _setup_test(app_params):
|
||||
assert isinstance(app_params.kwargs['srcdir'], Path)
|
||||
srcdir = app_params.kwargs['srcdir']
|
||||
src_locale_dir = srcdir / 'xx' / 'LC_MESSAGES'
|
||||
dest_locale_dir = srcdir / 'locale'
|
||||
# copy all catalogs into locale layout directory
|
||||
for po in find_files(src_locale_dir, '.po'):
|
||||
copy_po = (dest_locale_dir / 'en' / 'LC_MESSAGES' / po)
|
||||
for po in src_locale_dir.rglob('*.po'):
|
||||
copy_po = (dest_locale_dir / 'en' / 'LC_MESSAGES' / po.relative_to(src_locale_dir))
|
||||
if not copy_po.parent.exists():
|
||||
copy_po.parent.makedirs()
|
||||
shutil.copy(src_locale_dir / po, copy_po)
|
||||
copy_po.parent.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy(po, copy_po)
|
||||
|
||||
yield
|
||||
|
||||
# delete remnants left over after failed build
|
||||
dest_locale_dir.rmtree(True)
|
||||
(srcdir / '_build').rmtree(True)
|
||||
shutil.rmtree(dest_locale_dir, ignore_errors=True)
|
||||
shutil.rmtree(srcdir / '_build', ignore_errors=True)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('_setup_test')
|
||||
@ -35,11 +35,8 @@ def test_compile_all_catalogs(app, status, warning):
|
||||
|
||||
locale_dir = app.srcdir / 'locale'
|
||||
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
|
||||
expect = {
|
||||
x.replace('.po', '.mo')
|
||||
for x in find_files(catalog_dir, '.po')
|
||||
}
|
||||
actual = set(find_files(catalog_dir, '.mo'))
|
||||
expect = {x.with_suffix('.mo') for x in catalog_dir.rglob('*.po')}
|
||||
actual = set(catalog_dir.rglob('*.mo'))
|
||||
assert actual # not empty
|
||||
assert actual == expect
|
||||
|
||||
@ -53,12 +50,11 @@ def test_compile_specific_catalogs(app, status, warning):
|
||||
locale_dir = app.srcdir / 'locale'
|
||||
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
|
||||
|
||||
def get_actual():
|
||||
return set(find_files(catalog_dir, '.mo'))
|
||||
|
||||
actual_on_boot = get_actual() # sphinx.mo might be included
|
||||
actual_on_boot = set(catalog_dir.rglob('*.mo')) # sphinx.mo might be included
|
||||
app.builder.compile_specific_catalogs([app.srcdir / 'admonitions.txt'])
|
||||
actual = get_actual() - actual_on_boot
|
||||
actual = {str(x.relative_to(catalog_dir))
|
||||
for x in catalog_dir.rglob('*.mo')
|
||||
if x not in actual_on_boot}
|
||||
assert actual == {'admonitions.mo'}
|
||||
|
||||
|
||||
@ -72,10 +68,7 @@ def test_compile_update_catalogs(app, status, warning):
|
||||
|
||||
locale_dir = app.srcdir / 'locale'
|
||||
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
|
||||
expect = {
|
||||
x.replace('.po', '.mo')
|
||||
for x in find_files(catalog_dir, '.po')
|
||||
}
|
||||
actual = set(find_files(catalog_dir, '.mo'))
|
||||
expect = {x.with_suffix('.mo') for x in set(catalog_dir.rglob('*.po'))}
|
||||
actual = set(catalog_dir.rglob('*.mo'))
|
||||
assert actual # not empty
|
||||
assert actual == expect
|
||||
|
@ -1,6 +1,7 @@
|
||||
"""Test the sphinx.config.Config class."""
|
||||
|
||||
import time
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
@ -8,7 +9,6 @@ import pytest
|
||||
import sphinx
|
||||
from sphinx.config import ENUM, Config, check_confval_types
|
||||
from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError
|
||||
from sphinx.testing.path import path
|
||||
|
||||
|
||||
@pytest.mark.sphinx(testroot='config', confoverrides={
|
||||
@ -66,9 +66,9 @@ def test_core_config(app, status, warning):
|
||||
assert cfg['project'] == cfg.project == 'Sphinx Tests'
|
||||
|
||||
|
||||
def test_config_not_found(tempdir):
|
||||
def test_config_not_found(tmp_path):
|
||||
with pytest.raises(ConfigError):
|
||||
Config.read(tempdir)
|
||||
Config.read(tmp_path)
|
||||
|
||||
|
||||
def test_extension_values():
|
||||
@ -131,35 +131,35 @@ def test_overrides_boolean():
|
||||
|
||||
|
||||
@mock.patch("sphinx.config.logger")
|
||||
def test_errors_warnings(logger, tempdir):
|
||||
def test_errors_warnings(logger, tmp_path):
|
||||
# test the error for syntax errors in the config file
|
||||
(tempdir / 'conf.py').write_text('project = \n', encoding='ascii')
|
||||
(tmp_path / 'conf.py').write_text('project = \n', encoding='ascii')
|
||||
with pytest.raises(ConfigError) as excinfo:
|
||||
Config.read(tempdir, {}, None)
|
||||
Config.read(tmp_path, {}, None)
|
||||
assert 'conf.py' in str(excinfo.value)
|
||||
|
||||
# test the automatic conversion of 2.x only code in configs
|
||||
(tempdir / 'conf.py').write_text('project = u"Jägermeister"\n', encoding='utf8')
|
||||
cfg = Config.read(tempdir, {}, None)
|
||||
(tmp_path / 'conf.py').write_text('project = u"Jägermeister"\n', encoding='utf8')
|
||||
cfg = Config.read(tmp_path, {}, None)
|
||||
cfg.init_values()
|
||||
assert cfg.project == 'Jägermeister'
|
||||
assert logger.called is False
|
||||
|
||||
|
||||
def test_errors_if_setup_is_not_callable(tempdir, make_app):
|
||||
def test_errors_if_setup_is_not_callable(tmp_path, make_app):
|
||||
# test the error to call setup() in the config file
|
||||
(tempdir / 'conf.py').write_text('setup = 1', encoding='utf8')
|
||||
(tmp_path / 'conf.py').write_text('setup = 1', encoding='utf8')
|
||||
with pytest.raises(ConfigError) as excinfo:
|
||||
make_app(srcdir=tempdir)
|
||||
make_app(srcdir=tmp_path)
|
||||
assert 'callable' in str(excinfo.value)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def make_app_with_empty_project(make_app, tempdir):
|
||||
(tempdir / 'conf.py').write_text('', encoding='utf8')
|
||||
def make_app_with_empty_project(make_app, tmp_path):
|
||||
(tmp_path / 'conf.py').write_text('', encoding='utf8')
|
||||
|
||||
def _make_app(*args, **kw):
|
||||
kw.setdefault('srcdir', path(tempdir))
|
||||
kw.setdefault('srcdir', Path(tmp_path))
|
||||
return make_app(*args, **kw)
|
||||
return _make_app
|
||||
|
||||
@ -187,12 +187,12 @@ def test_needs_sphinx(make_app_with_empty_project):
|
||||
|
||||
|
||||
@mock.patch("sphinx.config.logger")
|
||||
def test_config_eol(logger, tempdir):
|
||||
def test_config_eol(logger, tmp_path):
|
||||
# test config file's eol patterns: LF, CRLF
|
||||
configfile = tempdir / 'conf.py'
|
||||
configfile = tmp_path / 'conf.py'
|
||||
for eol in (b'\n', b'\r\n'):
|
||||
configfile.write_bytes(b'project = "spam"' + eol)
|
||||
cfg = Config.read(tempdir, {}, None)
|
||||
cfg = Config.read(tmp_path, {}, None)
|
||||
cfg.init_values()
|
||||
assert cfg.project == 'spam'
|
||||
assert logger.called is False
|
||||
@ -384,14 +384,14 @@ def test_nitpick_ignore_regex_fullmatch(app, status, warning):
|
||||
assert expected in actual
|
||||
|
||||
|
||||
def test_conf_py_language_none(tempdir):
|
||||
def test_conf_py_language_none(tmp_path):
|
||||
"""Regression test for #10474."""
|
||||
|
||||
# Given a conf.py file with language = None
|
||||
(tempdir / 'conf.py').write_text("language = None", encoding='utf-8')
|
||||
(tmp_path / 'conf.py').write_text("language = None", encoding='utf-8')
|
||||
|
||||
# When we load conf.py into a Config object
|
||||
cfg = Config.read(tempdir, {}, None)
|
||||
cfg = Config.read(tmp_path, {}, None)
|
||||
cfg.init_values()
|
||||
|
||||
# Then the language is coerced to English
|
||||
@ -399,14 +399,14 @@ def test_conf_py_language_none(tempdir):
|
||||
|
||||
|
||||
@mock.patch("sphinx.config.logger")
|
||||
def test_conf_py_language_none_warning(logger, tempdir):
|
||||
def test_conf_py_language_none_warning(logger, tmp_path):
|
||||
"""Regression test for #10474."""
|
||||
|
||||
# Given a conf.py file with language = None
|
||||
(tempdir / 'conf.py').write_text("language = None", encoding='utf-8')
|
||||
(tmp_path / 'conf.py').write_text("language = None", encoding='utf-8')
|
||||
|
||||
# When we load conf.py into a Config object
|
||||
Config.read(tempdir, {}, None)
|
||||
Config.read(tmp_path, {}, None)
|
||||
|
||||
# Then a warning is raised
|
||||
assert logger.warning.called
|
||||
@ -416,28 +416,28 @@ def test_conf_py_language_none_warning(logger, tempdir):
|
||||
"Falling back to 'en' (English).")
|
||||
|
||||
|
||||
def test_conf_py_no_language(tempdir):
|
||||
def test_conf_py_no_language(tmp_path):
|
||||
"""Regression test for #10474."""
|
||||
|
||||
# Given a conf.py file with no language attribute
|
||||
(tempdir / 'conf.py').write_text("", encoding='utf-8')
|
||||
(tmp_path / 'conf.py').write_text("", encoding='utf-8')
|
||||
|
||||
# When we load conf.py into a Config object
|
||||
cfg = Config.read(tempdir, {}, None)
|
||||
cfg = Config.read(tmp_path, {}, None)
|
||||
cfg.init_values()
|
||||
|
||||
# Then the language is coerced to English
|
||||
assert cfg.language == "en"
|
||||
|
||||
|
||||
def test_conf_py_nitpick_ignore_list(tempdir):
|
||||
def test_conf_py_nitpick_ignore_list(tmp_path):
|
||||
"""Regression test for #11355."""
|
||||
|
||||
# Given a conf.py file with no language attribute
|
||||
(tempdir / 'conf.py').write_text("", encoding='utf-8')
|
||||
(tmp_path / 'conf.py').write_text("", encoding='utf-8')
|
||||
|
||||
# When we load conf.py into a Config object
|
||||
cfg = Config.read(tempdir, {}, None)
|
||||
cfg = Config.read(tmp_path, {}, None)
|
||||
cfg.init_values()
|
||||
|
||||
# Then the default nitpick_ignore[_regex] is an empty list
|
||||
|
@ -297,8 +297,8 @@ def test_LiteralIncludeReader_diff(testroot, literal_inc_path):
|
||||
options = {'diff': testroot / 'literal-diff.inc'}
|
||||
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
|
||||
content, lines = reader.read()
|
||||
assert content == ("--- " + testroot + "/literal-diff.inc\n"
|
||||
"+++ " + testroot + "/literal.inc\n"
|
||||
assert content == ("--- " + str(testroot) + "/literal-diff.inc\n"
|
||||
"+++ " + str(testroot) + "/literal.inc\n"
|
||||
"@@ -6,8 +6,8 @@\n"
|
||||
" pass\n"
|
||||
" \n"
|
||||
|
@ -737,7 +737,7 @@ def _get_obj(app, queryName):
|
||||
|
||||
|
||||
@pytest.mark.sphinx(testroot='domain-c-intersphinx', confoverrides={'nitpicky': True})
|
||||
def test_domain_c_build_intersphinx(tempdir, app, status, warning):
|
||||
def test_domain_c_build_intersphinx(tmp_path, app, status, warning):
|
||||
# a splitting of test_ids_vs_tags0 into the primary directives in a remote project,
|
||||
# and then the references in the test project
|
||||
origSource = """\
|
||||
@ -754,7 +754,7 @@ def test_domain_c_build_intersphinx(tempdir, app, status, warning):
|
||||
.. c:type:: _type
|
||||
.. c:function:: void _functionParam(int param)
|
||||
""" # noqa: F841
|
||||
inv_file = tempdir / 'inventory'
|
||||
inv_file = tmp_path / 'inventory'
|
||||
inv_file.write_bytes(b'''\
|
||||
# Sphinx inventory version 2
|
||||
# Project: C Intersphinx Test
|
||||
@ -775,7 +775,7 @@ _union c:union 1 index.html#c.$ -
|
||||
_var c:member 1 index.html#c.$ -
|
||||
''')) # noqa: W291
|
||||
app.config.intersphinx_mapping = {
|
||||
'https://localhost/intersphinx/c/': inv_file,
|
||||
'https://localhost/intersphinx/c/': str(inv_file),
|
||||
}
|
||||
app.config.intersphinx_cache_limit = 0
|
||||
# load the inventory and check if it's done correctly
|
||||
|
@ -1363,7 +1363,7 @@ def test_domain_cpp_build_field_role(app, status, warning):


@pytest.mark.sphinx(testroot='domain-cpp-intersphinx', confoverrides={'nitpicky': True})
def test_domain_cpp_build_intersphinx(tempdir, app, status, warning):
def test_domain_cpp_build_intersphinx(tmp_path, app, status, warning):
origSource = """\
.. cpp:class:: _class
.. cpp:struct:: _struct
@ -1385,7 +1385,7 @@ def test_domain_cpp_build_intersphinx(tempdir, app, status, warning):
.. cpp:function:: void _functionParam(int param)
.. cpp:function:: template<typename TParam> void _templateParam()
""" # noqa: F841
inv_file = tempdir / 'inventory'
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(b'''\
# Sphinx inventory version 2
# Project: C Intersphinx Test
@ -1413,7 +1413,7 @@ _union cpp:union 1 index.html#_CPPv46$ -
_var cpp:member 1 index.html#_CPPv44$ -
''')) # noqa: W291
app.config.intersphinx_mapping = {
'https://localhost/intersphinx/cpp/': inv_file,
'https://localhost/intersphinx/cpp/': str(inv_file),
}
app.config.intersphinx_cache_limit = 0
# load the inventory and check if it's done correctly
||||
|
@ -1,13 +1,13 @@
"""Test the BuildEnvironment class."""
import os
import shutil
from pathlib import Path

import pytest

from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.builders.latex import LaTeXBuilder
from sphinx.environment import CONFIG_CHANGED, CONFIG_EXTENSIONS_CHANGED, CONFIG_NEW, CONFIG_OK
from sphinx.testing.comparer import PathComparer


@pytest.mark.sphinx('dummy', testroot='basic')
@ -101,43 +101,43 @@ def test_env_relfn2path(app):
# relative filename and root document
relfn, absfn = app.env.relfn2path('logo.jpg', 'index')
assert relfn == 'logo.jpg'
assert absfn == app.srcdir / 'logo.jpg'
assert absfn == str(app.srcdir / 'logo.jpg')

# absolute filename and root document
relfn, absfn = app.env.relfn2path('/logo.jpg', 'index')
assert relfn == 'logo.jpg'
assert absfn == app.srcdir / 'logo.jpg'
assert absfn == str(app.srcdir / 'logo.jpg')

# relative filename and a document in subdir
relfn, absfn = app.env.relfn2path('logo.jpg', 'subdir/index')
assert relfn == PathComparer('subdir/logo.jpg')
assert absfn == app.srcdir / 'subdir' / 'logo.jpg'
assert Path(relfn) == Path('subdir/logo.jpg')
assert absfn == str(app.srcdir / 'subdir' / 'logo.jpg')

# absolute filename and a document in subdir
relfn, absfn = app.env.relfn2path('/logo.jpg', 'subdir/index')
assert relfn == 'logo.jpg'
assert absfn == app.srcdir / 'logo.jpg'
assert absfn == str(app.srcdir / 'logo.jpg')

# relative filename having subdir
relfn, absfn = app.env.relfn2path('images/logo.jpg', 'index')
assert relfn == 'images/logo.jpg'
assert absfn == app.srcdir / 'images' / 'logo.jpg'
assert absfn == str(app.srcdir / 'images' / 'logo.jpg')

# relative path traversal
relfn, absfn = app.env.relfn2path('../logo.jpg', 'index')
assert relfn == '../logo.jpg'
assert absfn == app.srcdir.parent / 'logo.jpg'
assert absfn == str(app.srcdir.parent / 'logo.jpg')

# relative path traversal
relfn, absfn = app.env.relfn2path('subdir/../logo.jpg', 'index')
assert relfn == 'logo.jpg'
assert absfn == app.srcdir / 'logo.jpg'
assert absfn == str(app.srcdir / 'logo.jpg')

# omit docname (w/ current docname)
app.env.temp_data['docname'] = 'subdir/document'
relfn, absfn = app.env.relfn2path('images/logo.jpg')
assert relfn == PathComparer('subdir/images/logo.jpg')
assert absfn == app.srcdir / 'subdir' / 'images' / 'logo.jpg'
assert Path(relfn) == Path('subdir/images/logo.jpg')
assert absfn == str(app.srcdir / 'subdir' / 'images' / 'logo.jpg')

# omit docname (w/o current docname)
app.env.temp_data.clear()
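In the relfn2path hunks above, ``app.srcdir`` is now a ``pathlib.Path`` while ``relfn2path()`` still returns plain strings, so one side of each comparison is normalised. A short sketch of the idiom, assuming ``app`` comes from the usual Sphinx test fixture:

    from pathlib import Path

    relfn, absfn = app.env.relfn2path('logo.jpg', 'index')
    assert Path(relfn) == Path('logo.jpg')        # compare as paths
    assert absfn == str(app.srcdir / 'logo.jpg')  # absfn is still a str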
||||
|
@ -1,21 +1,21 @@
"""Test the sphinx.apidoc module."""

import os.path
from collections import namedtuple

import pytest

import sphinx.ext.apidoc
from sphinx.ext.apidoc import main as apidoc_main
from sphinx.testing.path import path


@pytest.fixture()
def apidoc(rootdir, tempdir, apidoc_params):
def apidoc(rootdir, tmp_path, apidoc_params):
_, kwargs = apidoc_params
coderoot = rootdir / kwargs.get('coderoot', 'test-root')
outdir = tempdir / 'out'
excludes = [coderoot / e for e in kwargs.get('excludes', [])]
args = ['-o', outdir, '-F', coderoot] + excludes + kwargs.get('options', [])
outdir = tmp_path / 'out'
excludes = [str(coderoot / e) for e in kwargs.get('excludes', [])]
args = ['-o', str(outdir), '-F', str(coderoot)] + excludes + kwargs.get('options', [])
apidoc_main(args)
return namedtuple('apidoc', 'coderoot,outdir')(coderoot, outdir)
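``apidoc_main()`` parses an argv-style list of strings, so the fixture above converts ``pathlib.Path`` values explicitly before building the argument list. A hedged sketch of that conversion, using the fixture's own variable names (``os.fspath()`` would work equally well as ``str()`` here):

    outdir = tmp_path / 'out'
    excludes = [str(coderoot / e) for e in kwargs.get('excludes', [])]
    args = ['-o', str(outdir), '-F', str(coderoot)] + excludes
    apidoc_main(args)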
||||
|
||||
@ -37,8 +37,8 @@ def apidoc_params(request):
@pytest.mark.apidoc(coderoot='test-root')
def test_simple(make_app, apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'index.rst').isfile()
assert (outdir / 'conf.py').is_file()
assert (outdir / 'index.rst').is_file()

app = make_app('text', srcdir=outdir)
app.build()
@ -52,10 +52,10 @@ def test_simple(make_app, apidoc):
)
def test_pep_0420_enabled(make_app, apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'a.b.c.rst').isfile()
assert (outdir / 'a.b.e.rst').isfile()
assert (outdir / 'a.b.x.rst').isfile()
assert (outdir / 'conf.py').is_file()
assert (outdir / 'a.b.c.rst').is_file()
assert (outdir / 'a.b.e.rst').is_file()
assert (outdir / 'a.b.x.rst').is_file()

with open(outdir / 'a.b.c.rst', encoding='utf-8') as f:
rst = f.read()
@ -77,9 +77,9 @@ def test_pep_0420_enabled(make_app, apidoc):
print(app._warning.getvalue())

builddir = outdir / '_build' / 'text'
assert (builddir / 'a.b.c.txt').isfile()
assert (builddir / 'a.b.e.txt').isfile()
assert (builddir / 'a.b.x.txt').isfile()
assert (builddir / 'a.b.c.txt').is_file()
assert (builddir / 'a.b.e.txt').is_file()
assert (builddir / 'a.b.x.txt').is_file()

with open(builddir / 'a.b.c.txt', encoding='utf-8') as f:
txt = f.read()
@ -100,12 +100,12 @@ def test_pep_0420_enabled(make_app, apidoc):
)
def test_pep_0420_enabled_separate(make_app, apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'a.b.c.rst').isfile()
assert (outdir / 'a.b.e.rst').isfile()
assert (outdir / 'a.b.e.f.rst').isfile()
assert (outdir / 'a.b.x.rst').isfile()
assert (outdir / 'a.b.x.y.rst').isfile()
assert (outdir / 'conf.py').is_file()
assert (outdir / 'a.b.c.rst').is_file()
assert (outdir / 'a.b.e.rst').is_file()
assert (outdir / 'a.b.e.f.rst').is_file()
assert (outdir / 'a.b.x.rst').is_file()
assert (outdir / 'a.b.x.y.rst').is_file()

with open(outdir / 'a.b.c.rst', encoding='utf-8') as f:
rst = f.read()
@ -125,11 +125,11 @@ def test_pep_0420_enabled_separate(make_app, apidoc):
print(app._warning.getvalue())

builddir = outdir / '_build' / 'text'
assert (builddir / 'a.b.c.txt').isfile()
assert (builddir / 'a.b.e.txt').isfile()
assert (builddir / 'a.b.e.f.txt').isfile()
assert (builddir / 'a.b.x.txt').isfile()
assert (builddir / 'a.b.x.y.txt').isfile()
assert (builddir / 'a.b.c.txt').is_file()
assert (builddir / 'a.b.e.txt').is_file()
assert (builddir / 'a.b.e.f.txt').is_file()
assert (builddir / 'a.b.x.txt').is_file()
assert (builddir / 'a.b.x.y.txt').is_file()

with open(builddir / 'a.b.c.txt', encoding='utf-8') as f:
txt = f.read()
@ -147,7 +147,7 @@ def test_pep_0420_enabled_separate(make_app, apidoc):
@pytest.mark.apidoc(coderoot='test-apidoc-pep420/a')
def test_pep_0420_disabled(make_app, apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'conf.py').is_file()
assert not (outdir / 'a.b.c.rst').exists()
assert not (outdir / 'a.b.x.rst').exists()

@ -161,8 +161,8 @@ def test_pep_0420_disabled(make_app, apidoc):
coderoot='test-apidoc-pep420/a/b')
def test_pep_0420_disabled_top_level_verify(make_app, apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'c.rst').isfile()
assert (outdir / 'conf.py').is_file()
assert (outdir / 'c.rst').is_file()
assert not (outdir / 'x.rst').exists()

with open(outdir / 'c.rst', encoding='utf-8') as f:
@ -181,8 +181,8 @@ def test_pep_0420_disabled_top_level_verify(make_app, apidoc):
coderoot='test-apidoc-trailing-underscore')
def test_trailing_underscore(make_app, apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'package_.rst').isfile()
assert (outdir / 'conf.py').is_file()
assert (outdir / 'package_.rst').is_file()

app = make_app('text', srcdir=outdir)
app.build()
@ -203,13 +203,13 @@ def test_trailing_underscore(make_app, apidoc):
)
def test_excludes(apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'a.rst').isfile()
assert (outdir / 'a.b.rst').isfile()
assert (outdir / 'a.b.c.rst').isfile() # generated because not empty
assert not (outdir / 'a.b.e.rst').isfile() # skipped because of empty after excludes
assert (outdir / 'a.b.x.rst').isfile()
assert (outdir / 'a.b.x.y.rst').isfile()
assert (outdir / 'conf.py').is_file()
assert (outdir / 'a.rst').is_file()
assert (outdir / 'a.b.rst').is_file()
assert (outdir / 'a.b.c.rst').is_file() # generated because not empty
assert not (outdir / 'a.b.e.rst').is_file() # skipped because of empty after excludes
assert (outdir / 'a.b.x.rst').is_file()
assert (outdir / 'a.b.x.y.rst').is_file()


@pytest.mark.apidoc(
||||
@ -220,11 +220,11 @@ def test_excludes(apidoc):
def test_excludes_subpackage_should_be_skipped(apidoc):
"""Subpackage exclusion should work."""
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'a.rst').isfile()
assert (outdir / 'a.b.rst').isfile()
assert (outdir / 'a.b.c.rst').isfile() # generated because not empty
assert not (outdir / 'a.b.e.f.rst').isfile() # skipped because 'b/e' subpackage is skipped
assert (outdir / 'conf.py').is_file()
assert (outdir / 'a.rst').is_file()
assert (outdir / 'a.b.rst').is_file()
assert (outdir / 'a.b.c.rst').is_file() # generated because not empty
assert not (outdir / 'a.b.e.f.rst').is_file() # skipped because 'b/e' subpackage is skipped


@pytest.mark.apidoc(
@ -235,11 +235,11 @@ def test_excludes_subpackage_should_be_skipped(apidoc):
def test_excludes_module_should_be_skipped(apidoc):
"""Module exclusion should work."""
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'a.rst').isfile()
assert (outdir / 'a.b.rst').isfile()
assert (outdir / 'a.b.c.rst').isfile() # generated because not empty
assert not (outdir / 'a.b.e.f.rst').isfile() # skipped because of empty after excludes
assert (outdir / 'conf.py').is_file()
assert (outdir / 'a.rst').is_file()
assert (outdir / 'a.b.rst').is_file()
assert (outdir / 'a.b.c.rst').is_file() # generated because not empty
assert not (outdir / 'a.b.e.f.rst').is_file() # skipped because of empty after excludes


@pytest.mark.apidoc(
@ -250,11 +250,11 @@ def test_excludes_module_should_be_skipped(apidoc):
def test_excludes_module_should_not_be_skipped(apidoc):
"""Module should be included if no excludes are used."""
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'a.rst').isfile()
assert (outdir / 'a.b.rst').isfile()
assert (outdir / 'a.b.c.rst').isfile() # generated because not empty
assert (outdir / 'a.b.e.f.rst').isfile() # skipped because of empty after excludes
assert (outdir / 'conf.py').is_file()
assert (outdir / 'a.rst').is_file()
assert (outdir / 'a.b.rst').is_file()
assert (outdir / 'a.b.c.rst').is_file() # generated because not empty
assert (outdir / 'a.b.e.f.rst').is_file() # skipped because of empty after excludes


@pytest.mark.apidoc(
||||
@ -268,8 +268,8 @@ def test_excludes_module_should_not_be_skipped(apidoc):
)
def test_multibyte_parameters(make_app, apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'index.rst').isfile()
assert (outdir / 'conf.py').is_file()
assert (outdir / 'index.rst').is_file()

conf_py = (outdir / 'conf.py').read_text(encoding='utf8')
assert "project = 'プロジェクト名'" in conf_py
@ -289,7 +289,7 @@ def test_multibyte_parameters(make_app, apidoc):
)
def test_extension_parsed(make_app, apidoc):
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'conf.py').is_file()

with open(outdir / 'conf.py', encoding='utf-8') as f:
rst = f.read()
@ -307,7 +307,7 @@ def test_toc_all_references_should_exist_pep420_enabled(make_app, apidoc):
and what is created. This is the variant with pep420 enabled.
"""
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'conf.py').is_file()

toc = extract_toc(outdir / 'mypackage.rst')

@ -319,7 +319,7 @@ def test_toc_all_references_should_exist_pep420_enabled(make_app, apidoc):
continue
found_refs.append(ref)
filename = f"{ref}.rst"
if not (outdir / filename).isfile():
if not (outdir / filename).is_file():
missing_files.append(filename)

assert len(missing_files) == 0, \
@ -337,7 +337,7 @@ def test_toc_all_references_should_exist_pep420_disabled(make_app, apidoc):
and what is created. This is the variant with pep420 disabled.
"""
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'conf.py').is_file()

toc = extract_toc(outdir / 'mypackage.rst')

@ -349,7 +349,7 @@ def test_toc_all_references_should_exist_pep420_disabled(make_app, apidoc):
continue
filename = f"{ref}.rst"
found_refs.append(ref)
if not (outdir / filename).isfile():
if not (outdir / filename).is_file():
missing_files.append(filename)

assert len(missing_files) == 0, \
||||
@ -382,44 +382,44 @@ def test_subpackage_in_toc(make_app, apidoc):
are not skipped (issue #4520)
"""
outdir = apidoc.outdir
assert (outdir / 'conf.py').isfile()
assert (outdir / 'conf.py').is_file()

assert (outdir / 'parent.rst').isfile()
assert (outdir / 'parent.rst').is_file()
with open(outdir / 'parent.rst', encoding='utf-8') as f:
parent = f.read()
assert 'parent.child' in parent

assert (outdir / 'parent.child.rst').isfile()
assert (outdir / 'parent.child.rst').is_file()
with open(outdir / 'parent.child.rst', encoding='utf-8') as f:
parent_child = f.read()
assert 'parent.child.foo' in parent_child

assert (outdir / 'parent.child.foo.rst').isfile()
assert (outdir / 'parent.child.foo.rst').is_file()


def test_private(tempdir):
(tempdir / 'hello.py').write_text('', encoding='utf8')
(tempdir / '_world.py').write_text('', encoding='utf8')
def test_private(tmp_path):
(tmp_path / 'hello.py').write_text('', encoding='utf8')
(tmp_path / '_world.py').write_text('', encoding='utf8')

# without --private option
apidoc_main(['-o', tempdir, tempdir])
assert (tempdir / 'hello.rst').exists()
assert ':private-members:' not in (tempdir / 'hello.rst').read_text(encoding='utf8')
assert not (tempdir / '_world.rst').exists()
apidoc_main(['-o', str(tmp_path), str(tmp_path)])
assert (tmp_path / 'hello.rst').exists()
assert ':private-members:' not in (tmp_path / 'hello.rst').read_text(encoding='utf8')
assert not (tmp_path / '_world.rst').exists()

# with --private option
apidoc_main(['--private', '-f', '-o', tempdir, tempdir])
assert (tempdir / 'hello.rst').exists()
assert ':private-members:' in (tempdir / 'hello.rst').read_text(encoding='utf8')
assert (tempdir / '_world.rst').exists()
apidoc_main(['--private', '-f', '-o', str(tmp_path), str(tmp_path)])
assert (tmp_path / 'hello.rst').exists()
assert ':private-members:' in (tmp_path / 'hello.rst').read_text(encoding='utf8')
assert (tmp_path / '_world.rst').exists()


def test_toc_file(tempdir):
outdir = path(tempdir)
(outdir / 'module').makedirs()
def test_toc_file(tmp_path):
outdir = tmp_path
(outdir / 'module').mkdir(parents=True, exist_ok=True)
(outdir / 'example.py').write_text('', encoding='utf8')
(outdir / 'module' / 'example.py').write_text('', encoding='utf8')
apidoc_main(['-o', tempdir, tempdir])
apidoc_main(['-o', str(tmp_path), str(tmp_path)])
assert (outdir / 'modules.rst').exists()

content = (outdir / 'modules.rst').read_text(encoding='utf8')
||||
@ -432,10 +432,10 @@ def test_toc_file(tempdir):
" example\n")


def test_module_file(tempdir):
outdir = path(tempdir)
def test_module_file(tmp_path):
outdir = tmp_path
(outdir / 'example.py').write_text('', encoding='utf8')
apidoc_main(['-o', tempdir, tempdir])
apidoc_main(['-o', str(tmp_path), str(tmp_path)])
assert (outdir / 'example.rst').exists()

content = (outdir / 'example.rst').read_text(encoding='utf8')
@ -448,10 +448,10 @@ def test_module_file(tempdir):
" :show-inheritance:\n")


def test_module_file_noheadings(tempdir):
outdir = path(tempdir)
def test_module_file_noheadings(tmp_path):
outdir = tmp_path
(outdir / 'example.py').write_text('', encoding='utf8')
apidoc_main(['--no-headings', '-o', tempdir, tempdir])
apidoc_main(['--no-headings', '-o', str(tmp_path), str(tmp_path)])
assert (outdir / 'example.rst').exists()

content = (outdir / 'example.rst').read_text(encoding='utf8')
@ -461,15 +461,15 @@ def test_module_file_noheadings(tempdir):
" :show-inheritance:\n")


def test_package_file(tempdir):
outdir = path(tempdir)
(outdir / 'testpkg').makedirs()
def test_package_file(tmp_path):
outdir = tmp_path
(outdir / 'testpkg').mkdir(parents=True, exist_ok=True)
(outdir / 'testpkg' / '__init__.py').write_text('', encoding='utf8')
(outdir / 'testpkg' / 'hello.py').write_text('', encoding='utf8')
(outdir / 'testpkg' / 'world.py').write_text('', encoding='utf8')
(outdir / 'testpkg' / 'subpkg').makedirs()
(outdir / 'testpkg' / 'subpkg').mkdir(parents=True, exist_ok=True)
(outdir / 'testpkg' / 'subpkg' / '__init__.py').write_text('', encoding='utf8')
apidoc_main(['-o', tempdir, tempdir / 'testpkg'])
apidoc_main(['-o', str(outdir), str(outdir / 'testpkg')])
assert (outdir / 'testpkg.rst').exists()
assert (outdir / 'testpkg.subpkg.rst').exists()
||||
|
||||
@ -525,12 +525,12 @@ def test_package_file(tempdir):
" :show-inheritance:\n")


def test_package_file_separate(tempdir):
outdir = path(tempdir)
(outdir / 'testpkg').makedirs()
def test_package_file_separate(tmp_path):
outdir = tmp_path
(outdir / 'testpkg').mkdir(parents=True, exist_ok=True)
(outdir / 'testpkg' / '__init__.py').write_text('', encoding='utf8')
(outdir / 'testpkg' / 'example.py').write_text('', encoding='utf8')
apidoc_main(['--separate', '-o', tempdir, tempdir / 'testpkg'])
apidoc_main(['--separate', '-o', str(tmp_path), str(tmp_path / 'testpkg')])
assert (outdir / 'testpkg.rst').exists()
assert (outdir / 'testpkg.example.rst').exists()

@ -564,12 +564,12 @@ def test_package_file_separate(tempdir):
" :show-inheritance:\n")


def test_package_file_module_first(tempdir):
outdir = path(tempdir)
(outdir / 'testpkg').makedirs()
def test_package_file_module_first(tmp_path):
outdir = tmp_path
(outdir / 'testpkg').mkdir(parents=True, exist_ok=True)
(outdir / 'testpkg' / '__init__.py').write_text('', encoding='utf8')
(outdir / 'testpkg' / 'example.py').write_text('', encoding='utf8')
apidoc_main(['--module-first', '-o', tempdir, tempdir])
apidoc_main(['--module-first', '-o', str(tmp_path), str(tmp_path)])

content = (outdir / 'testpkg.rst').read_text(encoding='utf8')
assert content == ("testpkg package\n"
@ -592,11 +592,11 @@ def test_package_file_module_first(tempdir):
" :show-inheritance:\n")


def test_package_file_without_submodules(tempdir):
outdir = path(tempdir)
(outdir / 'testpkg').makedirs()
def test_package_file_without_submodules(tmp_path):
outdir = tmp_path
(outdir / 'testpkg').mkdir(parents=True, exist_ok=True)
(outdir / 'testpkg' / '__init__.py').write_text('', encoding='utf8')
apidoc_main(['-o', tempdir, tempdir / 'testpkg'])
apidoc_main(['-o', str(tmp_path), str(tmp_path / 'testpkg')])
assert (outdir / 'testpkg.rst').exists()

content = (outdir / 'testpkg.rst').read_text(encoding='utf8')
@ -612,11 +612,11 @@ def test_package_file_without_submodules(tempdir):
" :show-inheritance:\n")


def test_namespace_package_file(tempdir):
outdir = path(tempdir)
(outdir / 'testpkg').makedirs()
def test_namespace_package_file(tmp_path):
outdir = tmp_path
(outdir / 'testpkg').mkdir(parents=True, exist_ok=True)
(outdir / 'testpkg' / 'example.py').write_text('', encoding='utf8')
apidoc_main(['--implicit-namespace', '-o', tempdir, tempdir / 'testpkg'])
apidoc_main(['--implicit-namespace', '-o', str(tmp_path), str(tmp_path / 'testpkg')])
assert (outdir / 'testpkg.rst').exists()

content = (outdir / 'testpkg.rst').read_text(encoding='utf8')
@ -637,7 +637,7 @@ def test_namespace_package_file(tempdir):
" :show-inheritance:\n")


def test_no_duplicates(rootdir, tempdir):
def test_no_duplicates(rootdir, tmp_path):
"""Make sure that a ".pyx" and ".so" don't cause duplicate listings.

We can't use pytest.mark.apidoc here as we use a different set of arguments
@ -650,11 +650,11 @@ def test_no_duplicates(rootdir, tempdir):
sphinx.ext.apidoc.PY_SUFFIXES += ('.so',)

package = rootdir / 'test-apidoc-duplicates' / 'fish_licence'
outdir = tempdir / 'out'
apidoc_main(['-o', outdir, "-T", package, "--implicit-namespaces"])
outdir = tmp_path / 'out'
apidoc_main(['-o', str(outdir), "-T", str(package), "--implicit-namespaces"])

# Ensure the module has been documented
assert (outdir / 'fish_licence.rst').isfile()
assert os.path.isfile(outdir / 'fish_licence.rst')

# Ensure the submodule only appears once
text = (outdir / 'fish_licence.rst').read_text(encoding="utf-8")
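Most of the apidoc changes above are mechanical substitutions of the deprecated ``sphinx.testing.path`` helpers with their ``pathlib.Path`` counterparts (``isfile()`` becomes ``is_file()``, ``makedirs()`` becomes ``mkdir(parents=True, exist_ok=True)``). A small, self-contained sketch of the replacement idiom; the helper name is hypothetical and only illustrates the pattern:

    from pathlib import Path

    def prepare_package(root: Path) -> None:  # hypothetical helper
        # pathlib equivalents of the old path.makedirs()/isfile() helpers
        (root / 'testpkg').mkdir(parents=True, exist_ok=True)
        (root / 'testpkg' / '__init__.py').write_text('', encoding='utf8')
        assert (root / 'testpkg' / '__init__.py').is_file()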
||||
|
@ -2,6 +2,7 @@

import sys
from io import StringIO
from pathlib import Path
from unittest.mock import Mock, patch

import pytest
@ -199,7 +200,7 @@ def str_content(elem):
def test_escaping(app, status, warning):
app.builder.build_all()

outdir = app.builder.outdir
outdir = Path(app.builder.outdir)

docpage = outdir / 'underscore_module_.xml'
assert docpage.exists()
@ -452,7 +453,7 @@ def test_autosummary_generate_overwrite1(app_params, make_app):
args, kwargs = app_params
srcdir = kwargs.get('srcdir')

(srcdir / 'generated').makedirs(exist_ok=True)
(srcdir / 'generated').mkdir(parents=True, exist_ok=True)
(srcdir / 'generated' / 'autosummary_dummy_module.rst').write_text('', encoding='utf8')

app = make_app(*args, **kwargs)
@ -467,7 +468,7 @@ def test_autosummary_generate_overwrite2(app_params, make_app):
args, kwargs = app_params
srcdir = kwargs.get('srcdir')

(srcdir / 'generated').makedirs(exist_ok=True)
(srcdir / 'generated').mkdir(parents=True, exist_ok=True)
(srcdir / 'generated' / 'autosummary_dummy_module.rst').write_text('', encoding='utf8')

app = make_app(*args, **kwargs)
@ -669,8 +670,8 @@ def test_invalid_autosummary_generate(app, status, warning):
assert 'WARNING: autosummary_generate: file not found: unknown.rst' in warning.getvalue()


def test_autogen(rootdir, tempdir):
def test_autogen(rootdir, tmp_path):
with chdir(rootdir / 'test-templating'):
args = ['-o', tempdir, '-t', '.', 'autosummary_templating.txt']
args = ['-o', str(tmp_path), '-t', '.', 'autosummary_templating.txt']
autogen_main(args)
assert (tempdir / 'sphinx.application.TemplateBridge.rst').exists()
assert (tmp_path / 'sphinx.application.TemplateBridge.rst').exists()
||||
|
@ -214,7 +214,7 @@ def test_import_classes(rootdir):
from sphinx.util.i18n import CatalogInfo

try:
sys.path.append(rootdir / 'test-ext-inheritance_diagram')
sys.path.append(str(rootdir / 'test-ext-inheritance_diagram'))
from example.sphinx import DummyClass

# got exception for unknown class or module
||||
|
@ -91,14 +91,14 @@ def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app, status,


@pytest.mark.xfail(os.name != 'posix', reason="Path separator mismatch issue")
def test_missing_reference(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
def test_missing_reference(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)
set_config(app, {
'https://docs.python.org/': inv_file,
'py3k': ('https://docs.python.org/py3k/', inv_file),
'py3krel': ('py3k', inv_file), # relative path
'py3krelparent': ('../../py3k', inv_file), # relative path, parent dir
'https://docs.python.org/': str(inv_file),
'py3k': ('https://docs.python.org/py3k/', str(inv_file)),
'py3krel': ('py3k', str(inv_file)), # relative path
'py3krelparent': ('../../py3k', str(inv_file)), # relative path, parent dir
})

# load the inventory and check if it's done correctly
@ -169,11 +169,11 @@ def test_missing_reference(tempdir, app, status, warning):
assert rn['refuri'] == 'https://docs.python.org/docname.html'


def test_missing_reference_pydomain(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
def test_missing_reference_pydomain(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)
set_config(app, {
'https://docs.python.org/': inv_file,
'https://docs.python.org/': str(inv_file),
})

# load the inventory and check if it's done correctly
@ -209,11 +209,11 @@ def test_missing_reference_pydomain(tempdir, app, status, warning):
assert rn.astext() == 'A TERM'


def test_missing_reference_stddomain(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
def test_missing_reference_stddomain(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)
set_config(app, {
'cmd': ('https://docs.python.org/', inv_file),
'cmd': ('https://docs.python.org/', str(inv_file)),
})

# load the inventory and check if it's done correctly
@ -240,11 +240,11 @@ def test_missing_reference_stddomain(tempdir, app, status, warning):


@pytest.mark.sphinx('html', testroot='ext-intersphinx-cppdomain')
def test_missing_reference_cppdomain(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
def test_missing_reference_cppdomain(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)
set_config(app, {
'https://docs.python.org/': inv_file,
'https://docs.python.org/': str(inv_file),
})

# load the inventory and check if it's done correctly
@ -266,11 +266,11 @@ def test_missing_reference_cppdomain(tempdir, app, status, warning):
' title="(in foo v2.0)"><span class="n"><span class="pre">bartype</span></span></a>' in html)


def test_missing_reference_jsdomain(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
def test_missing_reference_jsdomain(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)
set_config(app, {
'https://docs.python.org/': inv_file,
'https://docs.python.org/': str(inv_file),
})

# load the inventory and check if it's done correctly
@ -290,11 +290,11 @@ def test_missing_reference_jsdomain(tempdir, app, status, warning):
assert rn.astext() == 'baz()'


def test_missing_reference_disabled_domain(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
def test_missing_reference_disabled_domain(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)
set_config(app, {
'inv': ('https://docs.python.org/', inv_file),
'inv': ('https://docs.python.org/', str(inv_file)),
})

# load the inventory and check if it's done correctly
@ -353,11 +353,11 @@ def test_missing_reference_disabled_domain(tempdir, app, status, warning):


@pytest.mark.xfail(os.name != 'posix', reason="Path separator mismatch issue")
def test_inventory_not_having_version(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
def test_inventory_not_having_version(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2_not_having_version)
set_config(app, {
'https://docs.python.org/': inv_file,
'https://docs.python.org/': str(inv_file),
})

# load the inventory and check if it's done correctly
@ -371,20 +371,20 @@ def test_inventory_not_having_version(tempdir, app, status, warning):
assert rn[0].astext() == 'Long Module desc'


def test_load_mappings_warnings(tempdir, app, status, warning):
def test_load_mappings_warnings(tmp_path, app, status, warning):
"""
load_mappings issues a warning if new-style mapping
identifiers are not string
"""
inv_file = tempdir / 'inventory'
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)
set_config(app, {
'https://docs.python.org/': inv_file,
'py3k': ('https://docs.python.org/py3k/', inv_file),
'repoze.workflow': ('http://docs.repoze.org/workflow/', inv_file),
'https://docs.python.org/': str(inv_file),
'py3k': ('https://docs.python.org/py3k/', str(inv_file)),
'repoze.workflow': ('http://docs.repoze.org/workflow/', str(inv_file)),
'django-taggit': ('http://django-taggit.readthedocs.org/en/latest/',
inv_file),
12345: ('http://www.sphinx-doc.org/en/stable/', inv_file),
str(inv_file)),
12345: ('http://www.sphinx-doc.org/en/stable/', str(inv_file)),
})

# load the inventory and check if it's done correctly
@ -396,8 +396,8 @@ def test_load_mappings_warnings(tempdir, app, status, warning):
assert 'intersphinx identifier 12345 is not string. Ignored' in warnings[1]


def test_load_mappings_fallback(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
def test_load_mappings_fallback(tmp_path, app, status, warning):
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)
set_config(app, {})

@ -419,7 +419,7 @@ def test_load_mappings_fallback(tempdir, app, status, warning):
# add fallbacks to mapping
app.config.intersphinx_mapping = {
'fallback': ('https://docs.python.org/py3k/', ('/invalid/inventory/path',
inv_file)),
str(inv_file))),
}
normalize_intersphinx_mapping(app, app.config)
load_mappings(app)
@ -493,9 +493,9 @@ def test_inspect_main_noargs(capsys):
assert stderr == expected + "\n"


def test_inspect_main_file(capsys, tempdir):
def test_inspect_main_file(capsys, tmp_path):
"""inspect_main interface, with file argument"""
inv_file = tempdir / 'inventory'
inv_file = tmp_path / 'inventory'
inv_file.write_bytes(inventory_v2)

inspect_main([str(inv_file)])
@ -532,7 +532,7 @@ def test_intersphinx_role(app, warning):
inv_file = app.srcdir / 'inventory'
inv_file.write_bytes(inventory_v2)
app.config.intersphinx_mapping = {
'inv': ('http://example.org/', inv_file),
'inv': ('http://example.org/', str(inv_file)),
}
app.config.intersphinx_cache_limit = 0
app.config.nitpicky = True
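Throughout the intersphinx hunks, the inventory location passed to ``intersphinx_mapping`` stays a plain string even though the file is now created via a ``pathlib.Path``. A hedged sketch of that configuration pattern; the mapping key here is hypothetical and the inventory payload comes from the test module's ``inventory_v2`` fixture data:

    inv_file = tmp_path / 'inventory'
    inv_file.write_bytes(inventory_v2)
    # config values remain strings, so the Path is converted explicitly
    app.config.intersphinx_mapping = {
        'example': ('https://docs.python.org/', str(inv_file)),
    }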
||||
|
@ -4,8 +4,11 @@ Runs the text builder in the test root.
"""

import os
import os.path
import re
import shutil
import time
from pathlib import Path

import pygments
import pytest
@ -20,7 +23,6 @@ from sphinx.testing.util import (
assert_re_search,
assert_startswith,
etree_parse,
path,
strip_escseq,
)
from sphinx.util.nodes import NodeMatcher
@ -35,28 +37,29 @@ sphinx_intl = pytest.mark.sphinx(


def read_po(pathname):
with pathname.open(encoding='utf-8') as f:
with open(pathname, encoding='utf-8') as f:
return pofile.read_po(f)


def write_mo(pathname, po):
with pathname.open('wb') as f:
with open(pathname, 'wb') as f:
return mofile.write_mo(f, po)


@pytest.fixture(autouse=True)
def _setup_intl(app_params):
srcdir = path(app_params.kwargs['srcdir'])
assert isinstance(app_params.kwargs['srcdir'], Path)
srcdir = app_params.kwargs['srcdir']
for dirpath, _dirs, files in os.walk(srcdir):
dirpath = path(dirpath)
dirpath = Path(dirpath)
for f in [f for f in files if f.endswith('.po')]:
po = dirpath / f
po = str(dirpath / f)
mo = srcdir / 'xx' / 'LC_MESSAGES' / (
os.path.relpath(po[:-3], srcdir) + '.mo')
if not mo.parent.exists():
mo.parent.makedirs()
mo.parent.mkdir(parents=True, exist_ok=True)

if not mo.exists() or mo.stat().st_mtime < po.stat().st_mtime:
if not mo.exists() or os.stat(mo).st_mtime < os.stat(po).st_mtime:
# compile .mo file only if needed
write_mo(mo, read_po(po))

@ -697,13 +700,13 @@ def test_gettext_dont_rebuild_mo(make_app, app_params):
# When rewriting the timestamp of mo file, the number of documents to be
# updated will be changed.
mtime = (app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').stat().st_mtime
(app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').utime((mtime + 5, mtime + 5))
os.utime(app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo', (mtime + 5, mtime + 5))
update_targets = get_update_targets(app0)
assert update_targets[1] == {'bom'}, update_targets

# Because doctree for gettext builder can not be shared with other builders,
# erase doctreedir before gettext build.
app0.doctreedir.rmtree()
shutil.rmtree(app0.doctreedir)

# phase2: build document with gettext builder.
# The mo file in the srcdir directory is retained.
@ -715,7 +718,7 @@ def test_gettext_dont_rebuild_mo(make_app, app_params):
assert update_targets[1] == set(), update_targets
# Even if the timestamp of the mo file is updated, the number of documents
# to be updated is 0. gettext builder does not rebuild because of mo update.
(app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').utime((mtime + 10, mtime + 10))
os.utime(app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo', (mtime + 10, mtime + 10))
update_targets = get_update_targets(app)
assert update_targets[1] == set(), update_targets

@ -867,7 +870,7 @@ def test_html_rebuild_mo(app):
assert len(updated) == 0

mtime = (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').stat().st_mtime
(app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').utime((mtime + 5, mtime + 5))
os.utime(app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo', (mtime + 5, mtime + 5))
app.env.find_files(app.config, app.builder)
_, updated, _ = app.env.get_outdated_files(config_changed=False)
assert len(updated) == 1
@ -1401,7 +1404,7 @@ def getwarning(warnings):
})
def test_gettext_allow_fuzzy_translations(app):
locale_dir = app.srcdir / 'locales' / 'de' / 'LC_MESSAGES'
locale_dir.makedirs()
locale_dir.mkdir(parents=True, exist_ok=True)
with (locale_dir / 'index.po').open('wb') as f:
catalog = Catalog()
catalog.add('features', 'FEATURES', flags=('fuzzy',))
@ -1420,7 +1423,7 @@ def test_gettext_allow_fuzzy_translations(app):
})
def test_gettext_disallow_fuzzy_translations(app):
locale_dir = app.srcdir / 'locales' / 'de' / 'LC_MESSAGES'
locale_dir.makedirs()
locale_dir.mkdir(parents=True, exist_ok=True)
with (locale_dir / 'index.po').open('wb') as f:
catalog = Catalog()
catalog.add('features', 'FEATURES', flags=('fuzzy',))
@ -1439,7 +1442,7 @@ def test_customize_system_message(make_app, app_params, sphinx_test_tempdir):

# prepare message catalog (.po)
locale_dir = sphinx_test_tempdir / 'basic' / 'locales' / 'de' / 'LC_MESSAGES'
locale_dir.makedirs()
locale_dir.mkdir(parents=True, exist_ok=True)
with (locale_dir / 'sphinx.po').open('wb') as f:
catalog = Catalog()
catalog.add('Quick search', 'QUICK SEARCH')
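Two of the deprecated helpers have no ``pathlib.Path`` equivalent, which is why the intl hunks above fall back to the standard library: ``path.utime()`` becomes ``os.utime()`` and ``path.rmtree()`` becomes ``shutil.rmtree()``. A minimal sketch under the assumption that ``app`` and ``mtime`` come from the surrounding test:

    import os
    import shutil

    mo = app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo'
    os.utime(mo, (mtime + 5, mtime + 5))  # bump the .mo timestamp
    shutil.rmtree(app.doctreedir)         # remove the doctree directory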
|
@ -6,7 +6,7 @@ from sphinx.project import Project


def test_project_discover(rootdir):
project = Project(rootdir / 'test-root', {})
project = Project(str(rootdir / 'test-root'), {})

docnames = {'autodoc', 'bom', 'extapi', 'extensions', 'footnote', 'images',
'includes', 'index', 'lists', 'markup', 'math', 'objects',
@ -48,7 +48,7 @@ def test_project_path2doc(app):
assert project.path2doc('index.foo.rst') == 'index.foo'
assert project.path2doc('index') is None
assert project.path2doc('path/to/index.rst') == 'path/to/index'
assert project.path2doc(app.srcdir / 'to/index.rst') == 'to/index'
assert project.path2doc(str(app.srcdir / 'to/index.rst')) == 'to/index'


@pytest.mark.sphinx(srcdir='project_doc2path', testroot='basic')
@ -56,17 +56,17 @@ def test_project_doc2path(app):
source_suffix = {'.rst': 'restructuredtext', '.txt': 'restructuredtext'}

project = Project(app.srcdir, source_suffix)
assert project.doc2path('index') == (app.srcdir / 'index.rst')
assert project.doc2path('index') == str(app.srcdir / 'index.rst')

# first source_suffix is used for missing file
assert project.doc2path('foo') == (app.srcdir / 'foo.rst')
assert project.doc2path('foo') == str(app.srcdir / 'foo.rst')

# matched source_suffix is used if exists
(app.srcdir / 'foo.txt').write_text('', encoding='utf8')
assert project.doc2path('foo') == (app.srcdir / 'foo.txt')
assert project.doc2path('foo') == str(app.srcdir / 'foo.txt')

# absolute path
assert project.doc2path('index', basedir=True) == (app.srcdir / 'index.rst')
assert project.doc2path('index', basedir=True) == str(app.srcdir / 'index.rst')

# relative path
assert project.doc2path('index', basedir=False) == 'index.rst'
||||
|
@ -40,7 +40,7 @@ def test_ModuleAnalyzer_for_module(rootdir):
assert analyzer.srcname in (SPHINX_MODULE_PATH,
os.path.abspath(SPHINX_MODULE_PATH))

path = rootdir / 'test-pycode'
path = str(rootdir / 'test-pycode')
sys.path.insert(0, path)
try:
analyzer = ModuleAnalyzer.for_module('cp_1251_coded')
||||
|
@ -85,9 +85,9 @@ def test_do_prompt_with_nonascii():
assert result == '\u30c9\u30a4\u30c4'


def test_quickstart_defaults(tempdir):
def test_quickstart_defaults(tmp_path):
answers = {
'Root path': tempdir,
'Root path': str(tmp_path),
'Project name': 'Sphinx Test',
'Author name': 'Georg Brandl',
'Project version': '0.1',
@ -97,8 +97,8 @@ def test_quickstart_defaults(tempdir):
qs.ask_user(d)
qs.generate(d)

conffile = tempdir / 'conf.py'
assert conffile.isfile()
conffile = tmp_path / 'conf.py'
assert conffile.is_file()
ns = {}
exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102
assert ns['extensions'] == []
@ -109,16 +109,16 @@ def test_quickstart_defaults(tempdir):
assert ns['release'] == '0.1'
assert ns['html_static_path'] == ['_static']

assert (tempdir / '_static').isdir()
assert (tempdir / '_templates').isdir()
assert (tempdir / 'index.rst').isfile()
assert (tempdir / 'Makefile').isfile()
assert (tempdir / 'make.bat').isfile()
assert (tmp_path / '_static').is_dir()
assert (tmp_path / '_templates').is_dir()
assert (tmp_path / 'index.rst').is_file()
assert (tmp_path / 'Makefile').is_file()
assert (tmp_path / 'make.bat').is_file()


def test_quickstart_all_answers(tempdir):
def test_quickstart_all_answers(tmp_path):
answers = {
'Root path': tempdir,
'Root path': str(tmp_path),
'Separate source and build': 'y',
'Name prefix for templates': '.',
'Project name': 'STASI™',
@ -147,8 +147,8 @@ def test_quickstart_all_answers(tempdir):
qs.ask_user(d)
qs.generate(d)

conffile = tempdir / 'source' / 'conf.py'
assert conffile.isfile()
conffile = tmp_path / 'source' / 'conf.py'
assert conffile.is_file()
ns = {}
exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102
assert ns['extensions'] == [
@ -165,15 +165,15 @@ def test_quickstart_all_answers(tempdir):
assert ns['todo_include_todos'] is True
assert ns['html_static_path'] == ['.static']

assert (tempdir / 'build').isdir()
assert (tempdir / 'source' / '.static').isdir()
assert (tempdir / 'source' / '.templates').isdir()
assert (tempdir / 'source' / 'contents.txt').isfile()
assert (tmp_path / 'build').is_dir()
assert (tmp_path / 'source' / '.static').is_dir()
assert (tmp_path / 'source' / '.templates').is_dir()
assert (tmp_path / 'source' / 'contents.txt').is_file()


def test_generated_files_eol(tempdir):
def test_generated_files_eol(tmp_path):
answers = {
'Root path': tempdir,
'Root path': str(tmp_path),
'Project name': 'Sphinx Test',
'Author name': 'Georg Brandl',
'Project version': '0.1',
@ -187,13 +187,13 @@ def test_generated_files_eol(tempdir):
content = filename.read_bytes().decode()
assert all(l[-len(eol):] == eol for l in content.splitlines(keepends=True))

assert_eol(tempdir / 'make.bat', '\r\n')
assert_eol(tempdir / 'Makefile', '\n')
assert_eol(tmp_path / 'make.bat', '\r\n')
assert_eol(tmp_path / 'Makefile', '\n')


def test_quickstart_and_build(tempdir):
def test_quickstart_and_build(tmp_path):
answers = {
'Root path': tempdir,
'Root path': str(tmp_path),
'Project name': 'Fullwidth characters: \u30c9\u30a4\u30c4',
'Author name': 'Georg Brandl',
'Project version': '0.1',
@ -204,10 +204,10 @@ def test_quickstart_and_build(tempdir):
qs.generate(d)

app = application.Sphinx(
tempdir, # srcdir
tempdir, # confdir
(tempdir / '_build' / 'html'), # outdir
(tempdir / '_build' / '.doctree'), # doctreedir
tmp_path, # srcdir
tmp_path, # confdir
(tmp_path / '_build' / 'html'), # outdir
(tmp_path / '_build' / '.doctree'), # doctreedir
'html', # buildername
status=StringIO(),
warning=warnfile)
@ -216,9 +216,9 @@ def test_quickstart_and_build(tempdir):
assert not warnings


def test_default_filename(tempdir):
def test_default_filename(tmp_path):
answers = {
'Root path': tempdir,
'Root path': str(tmp_path),
'Project name': '\u30c9\u30a4\u30c4', # Fullwidth characters only
'Author name': 'Georg Brandl',
'Project version': '0.1',
@ -228,25 +228,25 @@ def test_default_filename(tempdir):
qs.ask_user(d)
qs.generate(d)

conffile = tempdir / 'conf.py'
assert conffile.isfile()
conffile = tmp_path / 'conf.py'
assert conffile.is_file()
ns = {}
exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102


def test_extensions(tempdir):
def test_extensions(tmp_path):
qs.main(['-q', '-p', 'project_name', '-a', 'author',
'--extensions', 'foo,bar,baz', tempdir])
'--extensions', 'foo,bar,baz', str(tmp_path)])

conffile = tempdir / 'conf.py'
assert conffile.isfile()
conffile = tmp_path / 'conf.py'
assert conffile.is_file()
ns = {}
exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102
assert ns['extensions'] == ['foo', 'bar', 'baz']


def test_exits_when_existing_confpy(monkeypatch):
# The code detects existing conf.py with path.isfile()
# The code detects existing conf.py with path.is_file()
# so we mock it as True with pytest's monkeypatch
def mock_isfile(path):
return True
||||
|
@ -27,9 +27,9 @@ def test_theme_api(app, status, warning):

# test Theme class API
assert set(app.registry.html_themes.keys()) == set(themes)
assert app.registry.html_themes['test-theme'] == app.srcdir / 'test_theme' / 'test-theme'
assert app.registry.html_themes['ziptheme'] == app.srcdir / 'ziptheme.zip'
assert app.registry.html_themes['staticfiles'] == app.srcdir / 'test_theme' / 'staticfiles'
assert app.registry.html_themes['test-theme'] == str(app.srcdir / 'test_theme' / 'test-theme')
assert app.registry.html_themes['ziptheme'] == str(app.srcdir / 'ziptheme.zip')
assert app.registry.html_themes['staticfiles'] == str(app.srcdir / 'test_theme' / 'staticfiles')

# test Theme instance API
theme = app.builder.theme
||||
|
@ -33,10 +33,6 @@ def test_ensuredir():
ensuredir(path)
assert os.path.isdir(path)

with tempfile.NamedTemporaryFile() as tmp:
with pytest.raises(OSError):
ensuredir(tmp.name)


def test_import_object():
module = import_object('sphinx')
||||
|
@ -15,42 +15,42 @@ class DummyTemplateLoader(BuiltinTemplateLoader):
self.init(builder)


def test_copy_asset_file(tempdir):
def test_copy_asset_file(tmp_path):
renderer = DummyTemplateLoader()

# copy normal file
src = (tempdir / 'asset.txt')
src = (tmp_path / 'asset.txt')
src.write_text('# test data')
dest = (tempdir / 'output.txt')
dest = (tmp_path / 'output.txt')

copy_asset_file(src, dest)
assert dest.exists()
assert src.read_text(encoding='utf8') == dest.read_text(encoding='utf8')

# copy template file
src = (tempdir / 'asset.txt_t')
src = (tmp_path / 'asset.txt_t')
src.write_text('# {{var1}} data')
dest = (tempdir / 'output.txt_t')
dest = (tmp_path / 'output.txt_t')

copy_asset_file(src, dest, {'var1': 'template'}, renderer)
copy_asset_file(str(src), str(dest), {'var1': 'template'}, renderer)
assert not dest.exists()
assert (tempdir / 'output.txt').exists()
assert (tempdir / 'output.txt').read_text(encoding='utf8') == '# template data'
assert (tmp_path / 'output.txt').exists()
assert (tmp_path / 'output.txt').read_text(encoding='utf8') == '# template data'

# copy template file to subdir
src = (tempdir / 'asset.txt_t')
src = (tmp_path / 'asset.txt_t')
src.write_text('# {{var1}} data')
subdir1 = (tempdir / 'subdir')
subdir1.makedirs()
subdir1 = (tmp_path / 'subdir')
subdir1.mkdir(parents=True, exist_ok=True)

copy_asset_file(src, subdir1, {'var1': 'template'}, renderer)
assert (subdir1 / 'asset.txt').exists()
assert (subdir1 / 'asset.txt').read_text(encoding='utf8') == '# template data'

# copy template file without context
src = (tempdir / 'asset.txt_t')
subdir2 = (tempdir / 'subdir2')
subdir2.makedirs()
src = (tmp_path / 'asset.txt_t')
subdir2 = (tmp_path / 'subdir2')
subdir2.mkdir(parents=True, exist_ok=True)

copy_asset_file(src, subdir2)
assert not (subdir2 / 'asset.txt').exists()
@ -58,28 +58,28 @@ def test_copy_asset_file(tempdir):
assert (subdir2 / 'asset.txt_t').read_text(encoding='utf8') == '# {{var1}} data'


def test_copy_asset(tempdir):
def test_copy_asset(tmp_path):
renderer = DummyTemplateLoader()

# prepare source files
source = (tempdir / 'source')
source.makedirs()
source = (tmp_path / 'source')
source.mkdir(parents=True, exist_ok=True)
(source / 'index.rst').write_text('index.rst', encoding='utf8')
(source / 'foo.rst_t').write_text('{{var1}}.rst', encoding='utf8')
(source / '_static').makedirs()
(source / '_static').mkdir(parents=True, exist_ok=True)
(source / '_static' / 'basic.css').write_text('basic.css', encoding='utf8')
(source / '_templates').makedirs()
(source / '_templates').mkdir(parents=True, exist_ok=True)
(source / '_templates' / 'layout.html').write_text('layout.html', encoding='utf8')
(source / '_templates' / 'sidebar.html_t').write_text('sidebar: {{var2}}', encoding='utf8')

# copy a single file
assert not (tempdir / 'test1').exists()
copy_asset(source / 'index.rst', tempdir / 'test1')
assert (tempdir / 'test1').exists()
assert (tempdir / 'test1/index.rst').exists()
assert not (tmp_path / 'test1').exists()
copy_asset(source / 'index.rst', tmp_path / 'test1')
assert (tmp_path / 'test1').exists()
assert (tmp_path / 'test1/index.rst').exists()

# copy directories
destdir = tempdir / 'test2'
destdir = tmp_path / 'test2'
copy_asset(source, destdir, context={'var1': 'bar', 'var2': 'baz'}, renderer=renderer)
assert (destdir / 'index.rst').exists()
assert (destdir / 'foo.rst').exists()
@ -93,7 +93,7 @@ def test_copy_asset(tempdir):
def excluded(path):
return ('sidebar.html' in path or 'basic.css' in path)

destdir = tempdir / 'test3'
destdir = tmp_path / 'test3'
copy_asset(source, destdir, excluded,
context={'var1': 'bar', 'var2': 'baz'}, renderer=renderer)
assert (destdir / 'index.rst').exists()
|
@ -29,12 +29,12 @@ def test_catalog_info_for_sub_domain_file_and_path():
assert cat.mo_path == os.path.join('path', 'sub/domain.mo')


def test_catalog_outdated(tempdir):
(tempdir / 'test.po').write_text('#', encoding='utf8')
cat = i18n.CatalogInfo(tempdir, 'test', 'utf-8')
def test_catalog_outdated(tmp_path):
(tmp_path / 'test.po').write_text('#', encoding='utf8')
cat = i18n.CatalogInfo(tmp_path, 'test', 'utf-8')
assert cat.is_outdated() # if mo is not exist

mo_file = (tempdir / 'test.mo')
mo_file = (tmp_path / 'test.mo')
mo_file.write_text('#', encoding='utf8')
assert not cat.is_outdated() # if mo is exist and newer than po

@ -42,9 +42,9 @@ def test_catalog_outdated(tempdir):
assert cat.is_outdated() # if mo is exist and older than po


def test_catalog_write_mo(tempdir):
(tempdir / 'test.po').write_text('#', encoding='utf8')
cat = i18n.CatalogInfo(tempdir, 'test', 'utf-8')
def test_catalog_write_mo(tmp_path):
(tmp_path / 'test.po').write_text('#', encoding='utf8')
cat = i18n.CatalogInfo(tmp_path, 'test', 'utf-8')
cat.write_mo('en')
assert os.path.exists(cat.mo_path)
with open(cat.mo_path, 'rb') as f:
@ -147,45 +147,45 @@ def test_get_filename_for_language(app):
'/subdir/en/foo.png')


def test_CatalogRepository(tempdir):
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#', encoding='utf8')
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#', encoding='utf8')
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#', encoding='utf8')
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#', encoding='utf8')
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / '.dotdir').makedirs()
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / '.dotdir' / 'test5.po').write_text('#', encoding='utf8')
(tempdir / 'loc1' / 'yy' / 'LC_MESSAGES').makedirs()
(tempdir / 'loc1' / 'yy' / 'LC_MESSAGES' / 'test6.po').write_text('#', encoding='utf8')
(tempdir / 'loc2' / 'xx' / 'LC_MESSAGES').makedirs()
(tempdir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#', encoding='utf8')
(tempdir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test7.po').write_text('#', encoding='utf8')
def test_CatalogRepository(tmp_path):
(tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES').mkdir(parents=True, exist_ok=True)
(tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#', encoding='utf8')
(tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#', encoding='utf8')
(tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').mkdir(parents=True, exist_ok=True)
(tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#', encoding='utf8')
(tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#', encoding='utf8')
(tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / '.dotdir').mkdir(parents=True, exist_ok=True)
(tmp_path / 'loc1' / 'xx' / 'LC_MESSAGES' / '.dotdir' / 'test5.po').write_text('#', encoding='utf8')
(tmp_path / 'loc1' / 'yy' / 'LC_MESSAGES').mkdir(parents=True, exist_ok=True)
(tmp_path / 'loc1' / 'yy' / 'LC_MESSAGES' / 'test6.po').write_text('#', encoding='utf8')
(tmp_path / 'loc2' / 'xx' / 'LC_MESSAGES').mkdir(parents=True, exist_ok=True)
(tmp_path / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#', encoding='utf8')
(tmp_path / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test7.po').write_text('#', encoding='utf8')

# for language xx
repo = i18n.CatalogRepository(tempdir, ['loc1', 'loc2'], 'xx', 'utf-8')
assert list(repo.locale_dirs) == [str(tempdir / 'loc1'),
str(tempdir / 'loc2')]
repo = i18n.CatalogRepository(tmp_path, ['loc1', 'loc2'], 'xx', 'utf-8')
assert list(repo.locale_dirs) == [str(tmp_path / 'loc1'),
str(tmp_path / 'loc2')]
assert all(isinstance(c, i18n.CatalogInfo) for c in repo.catalogs)
assert sorted(c.domain for c in repo.catalogs) == ['sub/test3', 'sub/test4',
'test1', 'test1', 'test2', 'test7']

# for language yy
repo = i18n.CatalogRepository(tempdir, ['loc1', 'loc2'], 'yy', 'utf-8')
repo = i18n.CatalogRepository(tmp_path, ['loc1', 'loc2'], 'yy', 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == ['test6']

# unknown languages
repo = i18n.CatalogRepository(tempdir, ['loc1', 'loc2'], 'zz', 'utf-8')
repo = i18n.CatalogRepository(tmp_path, ['loc1', 'loc2'], 'zz', 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == []

# no languages
repo = i18n.CatalogRepository(tempdir, ['loc1', 'loc2'], None, 'utf-8')
repo = i18n.CatalogRepository(tmp_path, ['loc1', 'loc2'], None, 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == []

# unknown locale_dirs
repo = i18n.CatalogRepository(tempdir, ['loc3'], None, 'utf-8')
repo = i18n.CatalogRepository(tmp_path, ['loc3'], None, 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == []

# no locale_dirs
repo = i18n.CatalogRepository(tempdir, [], None, 'utf-8')
repo = i18n.CatalogRepository(tmp_path, [], None, 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == []
||||
|
@ -1,5 +1,5 @@
"""Test inventory util functions."""

import os
import posixpath
import zlib
from io import BytesIO
@ -88,12 +88,12 @@ def test_read_inventory_v2_not_having_version():

def _write_appconfig(dir, language, prefix=None):
prefix = prefix or language
(dir / prefix).makedirs()
os.makedirs(dir / prefix, exist_ok=True)
(dir / prefix / 'conf.py').write_text(f'language = "{language}"', encoding='utf8')
(dir / prefix / 'index.rst').write_text('index.rst', encoding='utf8')
assert sorted((dir / prefix).listdir()) == ['conf.py', 'index.rst']
assert sorted(os.listdir(dir / prefix)) == ['conf.py', 'index.rst']
assert (dir / prefix / 'index.rst').exists()
return (dir / prefix)
return dir / prefix


def _build_inventory(srcdir):
@ -103,13 +103,13 @@ def _build_inventory(srcdir):
return (app.outdir / 'objects.inv')


def test_inventory_localization(tempdir):
def test_inventory_localization(tmp_path):
# Build an app using Estonian (EE) locale
srcdir_et = _write_appconfig(tempdir, "et")
srcdir_et = _write_appconfig(tmp_path, "et")
inventory_et = _build_inventory(srcdir_et)

# Build the same app using English (US) locale
srcdir_en = _write_appconfig(tempdir, "en")
srcdir_en = _write_appconfig(tmp_path, "en")
inventory_en = _build_inventory(srcdir_en)

# Ensure that the inventory contents differ
@ -1,6 +1,7 @@
"""Test the versioning implementation."""

import pickle
import shutil

import pytest

@ -15,7 +16,7 @@ def _setup_module(rootdir, sphinx_test_tempdir):
global app, original, original_uids
srcdir = sphinx_test_tempdir / 'test-versioning'
if not srcdir.exists():
(rootdir / 'test-versioning').copytree(srcdir)
shutil.copytree(rootdir / 'test-versioning', srcdir)
app = SphinxTestApp(srcdir=srcdir)
app.builder.env.app = app
app.connect('doctree-resolved', on_doctree_resolved)