"""
    test_build_linkcheck
    ~~~~~~~~~~~~~~~~~~~~

    Test the build process with the linkcheck builder with the test root.

    :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import json
import re
from unittest import mock

import pytest


@pytest.mark.sphinx('linkcheck', testroot='linkcheck', freshenv=True)
def test_defaults(app, status, warning):
    app.builder.build_all()

    assert (app.outdir / 'output.txt').exists()
    content = (app.outdir / 'output.txt').read_text()

    print(content)
    # links to the missing anchors '#top' and '#does-not-exist' should fail
    assert "Anchor 'top' not found" in content
    assert "Anchor 'does-not-exist' not found" in content
    # a link to a non-existent URL should fail
    assert " Max retries exceeded with url: /doesnotexist" in content
    # missing images should fail
    assert "Not Found for url: https://www.google.com/image.png" in content
    assert "Not Found for url: https://www.google.com/image2.png" in content
    # a link to a missing local file should fail
    assert "[broken] path/to/notfound" in content
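    # one report line is expected per failing link: the two missing anchors,
    # the unreachable URL, the two missing images, and the missing local file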
    assert len(content.splitlines()) == 6


@pytest.mark.sphinx('linkcheck', testroot='linkcheck', freshenv=True)
def test_defaults_json(app, status, warning):
    app.builder.build_all()

    assert (app.outdir / 'output.json').exists()
    content = (app.outdir / 'output.json').read_text()
    print(content)
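    # output.json is expected to hold one JSON object per line (JSON Lines),
    # so each line is decoded independently below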
    rows = [json.loads(x) for x in content.splitlines()]
    row = rows[0]
    for attr in ["filename", "lineno", "status", "code", "uri",
                 "info"]:
        assert attr in row

    assert len(content.splitlines()) == 10
    assert len(rows) == 10
    # the output order of the rows is not stable
    # due to possible variance in network latency
    rowsby = {row["uri"]: row for row in rows}
    assert rowsby["https://www.google.com#!bar"] == {
        'filename': 'links.txt',
        'lineno': 10,
        'status': 'working',
        'code': 0,
        'uri': 'https://www.google.com#!bar',
        'info': ''
    }
    # a link to a non-existent URL should fail
    dnerow = rowsby['https://localhost:7777/doesnotexist']
    assert dnerow['filename'] == 'links.txt'
    assert dnerow['lineno'] == 13
    assert dnerow['status'] == 'broken'
    assert dnerow['code'] == 0
    assert dnerow['uri'] == 'https://localhost:7777/doesnotexist'
    assert rowsby['https://www.google.com/image2.png'] == {
        'filename': 'links.txt',
        'lineno': 18,
        'status': 'broken',
        'code': 0,
        'uri': 'https://www.google.com/image2.png',
        'info': '404 Client Error: Not Found for url: https://www.google.com/image2.png'
    }
    # links to the missing anchors '#top' and '#does-not-exist' should fail
    assert "Anchor 'top' not found" == \
        rowsby["https://www.google.com/#top"]["info"]
    assert "Anchor 'does-not-exist' not found" == \
        rowsby["http://www.sphinx-doc.org/en/1.7/intro.html#does-not-exist"]["info"]
    # missing images should fail
    assert "Not Found for url: https://www.google.com/image.png" in \
        rowsby["https://www.google.com/image.png"]["info"]


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck', freshenv=True,
    confoverrides={'linkcheck_anchors_ignore': ["^!", "^top$"],
                   'linkcheck_ignore': [
                       'https://localhost:7777/doesnotexist',
                       'http://www.sphinx-doc.org/en/1.7/intro.html#',
                       'https://www.google.com/image.png',
                       'https://www.google.com/image2.png',
                       'path/to/notfound']
                   })
def test_anchors_ignored(app, status, warning):
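    # with "^!" and "^top$" in linkcheck_anchors_ignore, the '#!bar' and '#top'
    # anchors are skipped, and linkcheck_ignore covers every URI known to be
    # broken above, so an empty report is expected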
    app.builder.build_all()

    assert (app.outdir / 'output.txt').exists()
    content = (app.outdir / 'output.txt').read_text()

    # expect all ok when excluding #top
    assert not content


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck', freshenv=True,
    confoverrides={'linkcheck_auth': [
        (r'.+google\.com/image.+', 'authinfo1'),
        (r'.+google\.com.+', 'authinfo2'),
    ]
    })
def test_auth(app, status, warning):
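    # linkcheck_auth maps URI regexes to auth info; the assertions below expect
    # the first matching pattern to win, so image URLs get 'authinfo1' even
    # though they also match the broader google.com pattern.  Both requests.get
    # and requests.head are patched because the builder may use either method
    # when checking a link.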
    mock_req = mock.MagicMock()
    mock_req.return_value = 'fake-response'

    with mock.patch.multiple('requests', get=mock_req, head=mock_req):
        app.builder.build_all()
        for c_args, c_kwargs in mock_req.call_args_list:
            if 'google.com/image' in c_args[0]:
                assert c_kwargs['auth'] == 'authinfo1'
            elif 'google.com' in c_args[0]:
                assert c_kwargs['auth'] == 'authinfo2'
            else:
                assert not c_kwargs['auth']


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck', freshenv=True,
    confoverrides={'linkcheck_request_headers': {
        "https://localhost:7777/": {
            "Accept": "text/html",
        },
        "http://www.sphinx-doc.org": {  # no slash at the end
            "Accept": "application/json",
        },
        "*": {
            "X-Secret": "open sesami",
        }
    }})
def test_linkcheck_request_headers(app, status, warning):
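    # the assertions below expect headers to be chosen by matching the request
    # URL against the configured URL prefixes, with "*" as a catch-all entry,
    # and unmatched URLs to keep the builder's default Accept header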
    mock_req = mock.MagicMock()
    mock_req.return_value = 'fake-response'

    with mock.patch.multiple('requests', get=mock_req, head=mock_req):
        app.builder.build_all()
        for args, kwargs in mock_req.call_args_list:
            url = args[0]
            headers = kwargs.get('headers', {})
            if "https://localhost:7777" in url:
                assert headers["Accept"] == "text/html"
            elif 'http://www.sphinx-doc.org' in url:
                assert headers["Accept"] == "application/json"
            elif 'https://www.google.com' in url:
                assert headers["Accept"] == "text/html,application/xhtml+xml;q=0.9,*/*;q=0.8"
                assert headers["X-Secret"] == "open sesami"
            else:
                assert headers["Accept"] == "text/html,application/xhtml+xml;q=0.9,*/*;q=0.8"