Fix #2720, #4034: Incorrect links with `:download:`, duplicate names, and parallel builds

Takeshi KOMIYA 2018-09-03 00:07:17 +09:00
parent c6e0d92d10
commit 34cc1b5c95
6 changed files with 105 additions and 16 deletions
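Background for the fix: download targets used to be flattened directly into ``_downloads/`` by ``FilenameUniqDict``, which resolves basename clashes by appending a counter (``img.png``, ``img1.png``, ...), so the final link target depends on the order in which documents are read and can disagree between parallel workers. This commit replaces that with a ``DownloadFiles`` dict that puts every file under ``_downloads/<md5 of source path>/<basename>``, which is deterministic. A rough sketch of the new naming scheme, assuming the digest is taken over the source path string as ``DownloadFiles.add_file`` below does:

    # Sketch only; the real logic lives in DownloadFiles.add_file further down.
    from hashlib import md5
    from os import path

    def download_dest(source_path):
        digest = md5(source_path.encode('utf-8')).hexdigest()
        return '%s/%s' % (digest, path.basename(source_path))

    # Two distinct files with the same basename no longer collide, and the
    # result does not depend on processing order:
    #   download_dest('/src/a/img.png')  ->  '<digest-a>/img.png'
    #   download_dest('/src/b/img.png')  ->  '<digest-b>/img.png'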


@@ -9,6 +9,7 @@ Incompatible changes
 * #5282: html theme: refer ``pygments_style`` settings of HTML themes
   preferentially
+* The URLs of download files have changed
 
 Deprecated
 ----------
@@ -30,6 +31,8 @@ Bugs fixed
 * #5348: download reference to remote file is not displayed
 * #5282: html theme: ``pygments_style`` of theme was overridden by ``conf.py``
   by default
+* #2720, #4034: Incorrect links with ``:download:``, duplicate names, and
+  parallel builds
 
 Testing
 --------


@@ -864,10 +864,10 @@ class StandaloneHTMLBuilder(Builder):
             for src in status_iterator(self.env.dlfiles, __('copying downloadable files... '),
                                        "brown", len(self.env.dlfiles), self.app.verbosity,
                                        stringify_func=to_relpath):
-                dest = self.env.dlfiles[src][1]
                 try:
-                    copyfile(path.join(self.srcdir, src),
-                             path.join(self.outdir, '_downloads', dest))
+                    dest = path.join(self.outdir, '_downloads', self.env.dlfiles[src][1])
+                    ensuredir(path.dirname(dest))
+                    copyfile(path.join(self.srcdir, src), dest)
                 except EnvironmentError as err:
                     logger.warning(__('cannot copy downloadable file %r: %s'),
                                    path.join(self.srcdir, src), err)
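Because every download now lives in its own digest-named directory, the copy loop must create that directory before copying, hence the added ``ensuredir`` call. A stand-alone approximation of the copy step using only the standard library (the builder itself uses Sphinx's ``ensuredir``/``copyfile`` helpers; the function and argument names here are invented):

    # Approximation only; srcdir, outdir, src and dest are placeholders,
    # not the builder's real attributes.
    import os
    import shutil

    def copy_download_file(srcdir, outdir, src, dest):
        target = os.path.join(outdir, '_downloads', dest)    # dest is '<md5>/<basename>'
        os.makedirs(os.path.dirname(target), exist_ok=True)  # what ensuredir provides
        shutil.copyfile(os.path.join(srcdir, src), target)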


@@ -28,7 +28,7 @@ from sphinx.environment.adapters.toctree import TocTree
 from sphinx.errors import SphinxError, BuildEnvironmentError, DocumentError, ExtensionError
 from sphinx.locale import __
 from sphinx.transforms import SphinxTransformer
-from sphinx.util import get_matching_docs, FilenameUniqDict
+from sphinx.util import get_matching_docs, DownloadFiles, FilenameUniqDict
 from sphinx.util import logging
 from sphinx.util.docutils import LoggingReporter
 from sphinx.util.i18n import find_catalog_files
@@ -190,7 +190,8 @@ class BuildEnvironment(object):
         # these map absolute path -> (docnames, unique filename)
         self.images = FilenameUniqDict()  # type: FilenameUniqDict
-        self.dlfiles = FilenameUniqDict()  # type: FilenameUniqDict
+        self.dlfiles = DownloadFiles()  # type: DownloadFiles
+                                        # filename -> (set of docnames, destination)
 
         # the original URI for images
         self.original_image_uri = {}  # type: Dict[unicode, unicode]


@@ -22,6 +22,7 @@ import warnings
 from codecs import BOM_UTF8
 from collections import deque
 from datetime import datetime
+from hashlib import md5
 from os import path
 from time import mktime, strptime
@@ -167,6 +168,37 @@ class FilenameUniqDict(dict):
         self._existing = state
 
 
+class DownloadFiles(dict):
+    """A special dictionary for download files.
+
+    .. important:: This class will be refactored in the near future.
+                   Hence, do not hack it directly.
+    """
+
+    def add_file(self, docname, filename):
+        # type: (unicode, unicode) -> unicode
+        if filename not in self:
+            digest = md5(filename.encode('utf-8')).hexdigest()
+            dest = '%s/%s' % (digest, os.path.basename(filename))
+            self[filename] = (set(), dest)
+
+        self[filename][0].add(docname)
+        return self[filename][1]
+
+    def purge_doc(self, docname):
+        # type: (unicode) -> None
+        for filename, (docs, dest) in list(self.items()):
+            docs.discard(docname)
+            if not docs:
+                del self[filename]
+
+    def merge_other(self, docnames, other):
+        # type: (Set[unicode], Dict[unicode, Tuple[Set[unicode], Any]]) -> None
+        for filename, (docs, dest) in other.items():
+            for docname in docs & set(docnames):
+                self.add_file(docname, filename)
+
+
 def copy_static_entry(source, targetdir, builder, context={},
                       exclude_matchers=(), level=0):
     # type: (unicode, unicode, Any, Dict, Tuple[Callable, ...], int) -> None
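A rough usage sketch of the new class (docnames and paths are invented): ``add_file`` returns the digest-based destination that ends up in generated links, ``purge_doc`` drops an entry once no document references it any more, and ``merge_other`` folds in entries recorded by parallel worker processes; since the destination is derived from the md5 of the source path, every worker computes the same name:

    # Usage sketch only; docnames and paths are made up for illustration.
    dlfiles = DownloadFiles()
    dest1 = dlfiles.add_file('index', '/project/dummy.dat')            # '<md5>/dummy.dat'
    dest2 = dlfiles.add_file('subdir/includes', '/project/dummy.dat')  # same entry reused
    assert dest1 == dest2

    # Parallel read: the main process merges what each worker recorded; the
    # digest-based destination is identical in every process.
    merged = DownloadFiles()
    merged.merge_other({'index'}, dlfiles)
    assert merged['/project/dummy.dat'][1] == dest1

    # Dropping the last referencing document removes the entry entirely.
    dlfiles.purge_doc('index')
    dlfiles.purge_doc('subdir/includes')
    assert '/project/dummy.dat' not in dlfiles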


@@ -151,7 +151,7 @@ def test_html_warnings(app, warning):
         (".//img[@src='../_images/rimg.png']", ''),
     ],
     'subdir/includes.html': [
-        (".//a[@href='../_downloads/img.png']", ''),
+        (".//a[@class='reference download internal']", ''),
         (".//img[@src='../_images/img.png']", ''),
         (".//p", 'This is an include file.'),
         (".//pre/span", 'line 1'),
@@ -159,8 +159,7 @@ def test_html_warnings(app, warning):
     ],
     'includes.html': [
         (".//pre", u'Max Strauß'),
-        (".//a[@href='_downloads/img.png']", ''),
-        (".//a[@href='_downloads/img1.png']", ''),
+        (".//a[@class='reference download internal']", ''),
         (".//pre/span", u'"quotes"'),
         (".//pre/span", u"'included'"),
         (".//pre/span[@class='s2']", u'üöä'),
@@ -421,6 +420,31 @@ def test_html_output(app, cached_etree_parse, fname, expect):
     check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
 
 
+@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
+    'html_context.hckey_co': 'hcval_co'})
+@pytest.mark.test_params(shared_result='test_build_html_output')
+def test_html_download(app):
+    app.build()
+
+    # subdir/includes.html
+    result = (app.outdir / 'subdir' / 'includes.html').text()
+    pattern = ('<a class="reference download internal" download="" '
+               'href="../(_downloads/.*/img.png)">')
+    matched = re.search(pattern, result)
+    assert matched
+    assert (app.outdir / matched.group(1)).exists()
+    filename = matched.group(1)
+
+    # includes.html
+    result = (app.outdir / 'includes.html').text()
+    pattern = ('<a class="reference download internal" download="" '
+               'href="(_downloads/.*/img.png)">')
+    matched = re.search(pattern, result)
+    assert matched
+    assert (app.outdir / matched.group(1)).exists()
+    assert matched.group(1) == filename
+
+
 @pytest.mark.sphinx('html', testroot='build-html-translator')
 def test_html_translator(app):
     app.build()


@@ -14,7 +14,9 @@
     :license: BSD, see LICENSE for details.
 """
 
+import re
 import xml.etree.cElementTree as ElementTree
+from hashlib import md5
 
 import pytest
 from html5lib import getTreeBuilder, HTMLParser
@@ -58,7 +60,7 @@ def cached_etree_parse():
         (".//img[@src='../_images/rimg.png']", ''),
     ],
     'subdir/includes.html': [
-        (".//a[@href='../_downloads/img.png']", ''),
+        (".//a[@class='reference download internal']", ''),
         (".//img[@src='../_images/img.png']", ''),
         (".//p", 'This is an include file.'),
         (".//pre/span", 'line 1'),
@@ -66,8 +68,7 @@ def cached_etree_parse():
     ],
     'includes.html': [
         (".//pre", u'Max Strauß'),
-        (".//a[@href='_downloads/img.png']", ''),
-        (".//a[@href='_downloads/img1.png']", ''),
+        (".//a[@class='reference download internal']", ''),
         (".//pre/span", u'"quotes"'),
         (".//pre/span", u"'included'"),
         (".//pre/span[@class='s2']", u'üöä'),
@@ -323,17 +324,45 @@ def test_html5_output(app, cached_etree_parse, fname, expect):
     check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
 
 
+@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
+    'html_context.hckey_co': 'hcval_co',
+    'html_experimental_html5_writer': True})
+@pytest.mark.test_params(shared_result='test_build_html_output')
+def test_html_download(app):
+    app.build()
+
+    # subdir/includes.html
+    result = (app.outdir / 'subdir' / 'includes.html').text()
+    pattern = ('<a class="reference download internal" download="" '
+               'href="../(_downloads/.*/img.png)">')
+    matched = re.search(pattern, result)
+    assert matched
+    assert (app.outdir / matched.group(1)).exists()
+    filename = matched.group(1)
+
+    # includes.html
+    result = (app.outdir / 'includes.html').text()
+    pattern = ('<a class="reference download internal" download="" '
+               'href="(_downloads/.*/img.png)">')
+    matched = re.search(pattern, result)
+    assert matched
+    assert (app.outdir / matched.group(1)).exists()
+    assert matched.group(1) == filename
+
+
 @pytest.mark.sphinx('html', testroot='roles-download',
                     confoverrides={'html_experimental_html5_writer': True})
 def test_html_download_role(app, status, warning):
     app.build()
-    assert (app.outdir / '_downloads' / 'dummy.dat').exists()
+    digest = md5((app.srcdir / 'dummy.dat').encode('utf-8')).hexdigest()
+    assert (app.outdir / '_downloads' / digest / 'dummy.dat').exists()
 
     content = (app.outdir / 'index.html').text()
-    assert ('<li><p><a class="reference download internal" download="" '
-            'href="_downloads/dummy.dat">'
-            '<code class="xref download docutils literal notranslate">'
-            '<span class="pre">dummy.dat</span></code></a></p></li>' in content)
+    assert (('<li><p><a class="reference download internal" download="" '
+             'href="_downloads/%s/dummy.dat">'
+             '<code class="xref download docutils literal notranslate">'
+             '<span class="pre">dummy.dat</span></code></a></p></li>' % digest)
+            in content)
     assert ('<li><p><code class="xref download docutils literal notranslate">'
             '<span class="pre">not_found.dat</span></code></p></li>' in content)
     assert ('<li><p><a class="reference download external" download="" '