"""Test the build process with the linkcheck builder with the test root."""

from __future__ import annotations

import base64
import http.server
import json
import re
import textwrap
import time
import wsgiref.handlers
from datetime import datetime
from os import path
from queue import Queue
from unittest import mock

import pytest

from sphinx.builders.linkcheck import HyperlinkAvailabilityCheckWorker, RateLimit
from sphinx.testing.util import strip_escseq
from sphinx.util import requests
from sphinx.util.console import strip_colors

from .utils import CERT_FILE, http_server, https_server

ts_re = re.compile(r".*\[(?P<ts>.*)\].*")
SPHINX_DOCS_INDEX = path.abspath(path.join(__file__, "..", "roots", "test-linkcheck", "sphinx-docs-index.html"))
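

# Serves the pages used by the default linkcheck test root: "/" and
# "/anchor.html" respond with 200 (the latter contains an anchor named
# "found"); any other path returns 404.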
class DefaultsHandler(http.server.BaseHTTPRequestHandler):
    def do_HEAD(self):
        if self.path[1:].rstrip() == "":
            self.send_response(200, "OK")
            self.end_headers()
        elif self.path[1:].rstrip() == "anchor.html":
            self.send_response(200, "OK")
            self.end_headers()
        else:
            self.send_response(404, "Not Found")
            self.end_headers()

    def do_GET(self):
        self.do_HEAD()
        if self.path[1:].rstrip() == "":
            self.wfile.write(b"ok\n\n")
        elif self.path[1:].rstrip() == "anchor.html":
            doc = '<!DOCTYPE html><html><body><a id="found"></a></body></html>'
            self.wfile.write(doc.encode('utf-8'))
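

# Builds the default test root and checks both the text and JSON reports:
# working links, missing anchors, missing images and a broken local file path.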
@pytest.mark.sphinx('linkcheck', testroot='linkcheck', freshenv=True)
def test_defaults(app):
    with http_server(DefaultsHandler):
        app.build()

    # Text output
    assert (app.outdir / 'output.txt').exists()
    content = (app.outdir / 'output.txt').read_text(encoding='utf8')

    # looking for '#top' and '#does-not-exist' not found should fail
    assert "Anchor 'top' not found" in content
    assert "Anchor 'does-not-exist' not found" in content
    # images should fail
    assert "Not Found for url: http://localhost:7777/image.png" in content
    assert "Not Found for url: http://localhost:7777/image2.png" in content
    # looking for local file should fail
    assert "[broken] path/to/notfound" in content
    assert len(content.splitlines()) == 5

    # JSON output
    assert (app.outdir / 'output.json').exists()
    content = (app.outdir / 'output.json').read_text(encoding='utf8')

    rows = [json.loads(x) for x in content.splitlines()]
    row = rows[0]
    for attr in ("filename", "lineno", "status", "code", "uri", "info"):
        assert attr in row

    assert len(content.splitlines()) == 10
    assert len(rows) == 10
    # the output order of the rows is not stable
    # due to possible variance in network latency
    rowsby = {row["uri"]: row for row in rows}
    assert rowsby["http://localhost:7777#!bar"] == {
        'filename': 'links.rst',
        'lineno': 5,
        'status': 'working',
        'code': 0,
        'uri': 'http://localhost:7777#!bar',
        'info': '',
    }
    assert rowsby['http://localhost:7777/image2.png'] == {
        'filename': 'links.rst',
        'lineno': 13,
        'status': 'broken',
        'code': 0,
        'uri': 'http://localhost:7777/image2.png',
        'info': '404 Client Error: Not Found for url: http://localhost:7777/image2.png',
    }
    # looking for '#top' and '#does-not-exist' not found should fail
    assert rowsby["http://localhost:7777/#top"]["info"] == "Anchor 'top' not found"
    assert rowsby["http://localhost:7777#does-not-exist"]["info"] == "Anchor 'does-not-exist' not found"
    # images should fail
    assert "Not Found for url: http://localhost:7777/image.png" in rowsby["http://localhost:7777/image.png"]["info"]
    # anchor should be found
    assert rowsby['http://localhost:7777/anchor.html#found'] == {
        'filename': 'links.rst',
        'lineno': 14,
        'status': 'working',
        'code': 0,
        'uri': 'http://localhost:7777/anchor.html#found',
        'info': '',
    }


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-too-many-retries', freshenv=True)
def test_too_many_retries(app):
    with http_server(DefaultsHandler):
        app.build()

    # Text output
    assert (app.outdir / 'output.txt').exists()
    content = (app.outdir / 'output.txt').read_text(encoding='utf8')

    # looking for non-existent URL should fail
    assert " Max retries exceeded with url: /doesnotexist" in content

    # JSON output
    assert (app.outdir / 'output.json').exists()
    content = (app.outdir / 'output.json').read_text(encoding='utf8')

    assert len(content.splitlines()) == 1
    row = json.loads(content)
    # the output order of the rows is not stable
    # due to possible variance in network latency

    # looking for non-existent URL should fail
    assert row['filename'] == 'index.rst'
    assert row['lineno'] == 1
    assert row['status'] == 'broken'
    assert row['code'] == 0
    assert row['uri'] == 'https://localhost:7777/doesnotexist'


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-raw-node', freshenv=True)
def test_raw_node(app):
    with http_server(OKHandler):
        app.build()

    # JSON output
    assert (app.outdir / 'output.json').exists()
    content = (app.outdir / 'output.json').read_text(encoding='utf8')

    assert len(content.splitlines()) == 1
    row = json.loads(content)

    # raw nodes' url should be checked too
    assert row == {
        'filename': 'index.rst',
        'lineno': 1,
        'status': 'working',
        'code': 0,
        'uri': 'http://localhost:7777/',
        'info': '',
    }


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck-anchors-ignore', freshenv=True,
    confoverrides={'linkcheck_anchors_ignore': ["^!", "^top$"]})
def test_anchors_ignored(app):
    with http_server(OKHandler):
        app.build()

    assert (app.outdir / 'output.txt').exists()
    content = (app.outdir / 'output.txt').read_text(encoding='utf8')

    # expect all ok when excluding #top
    assert not content
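

# A 500 response while fetching a page for anchor checking is reported as a
# broken link, with the server's status line in the message.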
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-anchor', freshenv=True)
def test_raises_for_invalid_status(app):
    class InternalServerErrorHandler(http.server.BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_error(500, "Internal Server Error")

    with http_server(InternalServerErrorHandler):
        app.build()
    content = (app.outdir / 'output.txt').read_text(encoding='utf8')
    assert content == (
        "index.rst:1: [broken] http://localhost:7777/#anchor: "
        "500 Server Error: Internal Server Error "
        "for url: http://localhost:7777/\n"
    )
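

# Returns a handler class that appends the headers of every incoming request
# to the given list, so tests can assert on what the linkcheck builder sent.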
def capture_headers_handler(records):
    class HeadersDumperHandler(http.server.BaseHTTPRequestHandler):
        def do_HEAD(self):
            self.do_GET()

        def do_GET(self):
            self.send_response(200, "OK")
            self.end_headers()
            records.append(self.headers.as_string())
    return HeadersDumperHandler


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck-localserver', freshenv=True,
    confoverrides={'linkcheck_auth': [
        (r'^$', ('no', 'match')),
        (r'^http://localhost:7777/$', ('user1', 'password')),
        (r'.*local.*', ('user2', 'hunter2')),
    ]})
def test_auth_header_uses_first_match(app):
    records = []
    with http_server(capture_headers_handler(records)):
        app.build()

    stdout = "\n".join(records)
    encoded_auth = base64.b64encode(b'user1:password').decode('ascii')
    assert f"Authorization: Basic {encoded_auth}\n" in stdout


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck-localserver', freshenv=True,
    confoverrides={'linkcheck_auth': [(r'^$', ('user1', 'password'))]})
def test_auth_header_no_match(app):
    records = []
    with http_server(capture_headers_handler(records)):
        app.build()

    stdout = "\n".join(records)
    assert "Authorization" not in stdout


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck-localserver', freshenv=True,
    confoverrides={'linkcheck_request_headers': {
        "http://localhost:7777/": {
            "Accept": "text/html",
        },
        "*": {
            "X-Secret": "open sesami",
        },
    }})
def test_linkcheck_request_headers(app):
    records = []
    with http_server(capture_headers_handler(records)):
        app.build()

    stdout = "\n".join(records)
    assert "Accept: text/html\n" in stdout
    assert "X-Secret" not in stdout
    assert "sesami" not in stdout


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck-localserver', freshenv=True,
    confoverrides={'linkcheck_request_headers': {
        "http://localhost:7777": {"Accept": "application/json"},
        "*": {"X-Secret": "open sesami"},
    }})
def test_linkcheck_request_headers_no_slash(app):
    records = []
    with http_server(capture_headers_handler(records)):
        app.build()

    stdout = "\n".join(records)
    assert "Accept: application/json\n" in stdout
    assert "X-Secret" not in stdout
    assert "sesami" not in stdout


@pytest.mark.sphinx(
    'linkcheck', testroot='linkcheck-localserver', freshenv=True,
    confoverrides={'linkcheck_request_headers': {
        "http://do.not.match.org": {"Accept": "application/json"},
        "*": {"X-Secret": "open sesami"},
    }})
def test_linkcheck_request_headers_default(app):
    records = []
    with http_server(capture_headers_handler(records)):
        app.build()

    stdout = "\n".join(records)
    assert "Accepts: application/json\n" not in stdout
    assert "X-Secret: open sesami\n" in stdout
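

# Builds a handler that redirects "/" to "/?redirected=1" (302) and answers the
# redirected location with 204.  When support_head is False, HEAD requests are
# rejected with 405, so the checker has to fall back to GET.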
def make_redirect_handler(*, support_head):
    class RedirectOnceHandler(http.server.BaseHTTPRequestHandler):
        def do_HEAD(self):
            if support_head:
                self.do_GET()
            else:
                self.send_response(405, "Method Not Allowed")
                self.end_headers()

        def do_GET(self):
            if self.path == "/?redirected=1":
                self.send_response(204, "No content")
            else:
                self.send_response(302, "Found")
                self.send_header("Location", "http://localhost:7777/?redirected=1")
            self.end_headers()

        def log_date_time_string(self):
            """Strip date and time from logged messages for assertions."""
            return ""

    return RedirectOnceHandler


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
def test_follows_redirects_on_HEAD(app, capsys, warning):
    with http_server(make_redirect_handler(support_head=True)):
        app.build()
    stdout, stderr = capsys.readouterr()
    content = (app.outdir / 'output.txt').read_text(encoding='utf8')
    assert content == (
        "index.rst:1: [redirected with Found] "
        "http://localhost:7777/ to http://localhost:7777/?redirected=1\n"
    )
    assert stderr == textwrap.dedent(
        """\
        127.0.0.1 - - [] "HEAD / HTTP/1.1" 302 -
        127.0.0.1 - - [] "HEAD /?redirected=1 HTTP/1.1" 204 -
        """,
    )
    assert warning.getvalue() == ''


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
def test_follows_redirects_on_GET(app, capsys, warning):
    with http_server(make_redirect_handler(support_head=False)):
        app.build()
    stdout, stderr = capsys.readouterr()
    content = (app.outdir / 'output.txt').read_text(encoding='utf8')
    assert content == (
        "index.rst:1: [redirected with Found] "
        "http://localhost:7777/ to http://localhost:7777/?redirected=1\n"
    )
    assert stderr == textwrap.dedent(
        """\
        127.0.0.1 - - [] "HEAD / HTTP/1.1" 405 -
        127.0.0.1 - - [] "GET / HTTP/1.1" 302 -
        127.0.0.1 - - [] "GET /?redirected=1 HTTP/1.1" 204 -
        """,
    )
    assert warning.getvalue() == ''


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-warn-redirects',
                    freshenv=True, confoverrides={
                        'linkcheck_allowed_redirects': {'http://localhost:7777/.*1': '.*'},
                    })
def test_linkcheck_allowed_redirects(app, warning):
    with http_server(make_redirect_handler(support_head=False)):
        app.build()

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        rows = [json.loads(l) for l in fp.readlines()]

    assert len(rows) == 2
    records = {row["uri"]: row for row in rows}
    assert records["http://localhost:7777/path1"]["status"] == "working"
    assert records["http://localhost:7777/path2"] == {
        'filename': 'index.rst',
        'lineno': 3,
        'status': 'redirected',
        'code': 302,
        'uri': 'http://localhost:7777/path2',
        'info': 'http://localhost:7777/?redirected=1',
    }

    assert ("index.rst:3: WARNING: redirect http://localhost:7777/path2 - with Found to "
            "http://localhost:7777/?redirected=1\n" in strip_escseq(warning.getvalue()))
    assert len(warning.getvalue().splitlines()) == 1
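

# Minimal handler that answers both HEAD and GET with 200 "OK"; GET also
# returns a small body.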
class OKHandler(http.server.BaseHTTPRequestHandler):
    def do_HEAD(self):
        self.send_response(200, "OK")
        self.end_headers()

    def do_GET(self):
        self.do_HEAD()
        self.wfile.write(b"ok\n")


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
def test_invalid_ssl(app):
    # Link indicates SSL should be used (https) but the server does not handle it.
    with http_server(OKHandler):
        with mock.patch("sphinx.builders.linkcheck.requests.get", wraps=requests.get) as get_request:
            app.build()
            assert not get_request.called

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        content = json.load(fp)
    assert content["status"] == "broken"
    assert content["filename"] == "index.rst"
    assert content["lineno"] == 1
    assert content["uri"] == "https://localhost:7777/"
    assert "SSLError" in content["info"]


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
def test_connect_to_selfsigned_fails(app):
    with https_server(OKHandler):
        app.build()

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        content = json.load(fp)
    assert content["status"] == "broken"
    assert content["filename"] == "index.rst"
    assert content["lineno"] == 1
    assert content["uri"] == "https://localhost:7777/"
    assert "[SSL: CERTIFICATE_VERIFY_FAILED]" in content["info"]


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
def test_connect_to_selfsigned_with_tls_verify_false(app):
    app.config.tls_verify = False
    with https_server(OKHandler):
        app.build()

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        content = json.load(fp)
    assert content == {
        "code": 0,
        "status": "working",
        "filename": "index.rst",
        "lineno": 1,
        "uri": "https://localhost:7777/",
        "info": "",
    }


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
def test_connect_to_selfsigned_with_tls_cacerts(app):
    app.config.tls_cacerts = CERT_FILE
    with https_server(OKHandler):
        app.build()

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        content = json.load(fp)
    assert content == {
        "code": 0,
        "status": "working",
        "filename": "index.rst",
        "lineno": 1,
        "uri": "https://localhost:7777/",
        "info": "",
    }


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
def test_connect_to_selfsigned_with_requests_env_var(monkeypatch, app):
    monkeypatch.setenv("REQUESTS_CA_BUNDLE", CERT_FILE)
    with https_server(OKHandler):
        app.build()

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        content = json.load(fp)
    assert content == {
        "code": 0,
        "status": "working",
        "filename": "index.rst",
        "lineno": 1,
        "uri": "https://localhost:7777/",
        "info": "",
    }


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
def test_connect_to_selfsigned_nonexistent_cert_file(app):
    app.config.tls_cacerts = "does/not/exist"
    with https_server(OKHandler):
        app.build()

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        content = json.load(fp)
    assert content == {
        "code": 0,
        "status": "broken",
        "filename": "index.rst",
        "lineno": 1,
        "uri": "https://localhost:7777/",
        "info": "Could not find a suitable TLS CA certificate bundle, invalid path: does/not/exist",
    }
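

# Redirects every HEAD request back to the same URL while GET succeeds, so that
# checking over HEAD runs into requests' redirect limit.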
class InfiniteRedirectOnHeadHandler(http.server.BaseHTTPRequestHandler):
    def do_HEAD(self):
        self.send_response(302, "Found")
        self.send_header("Location", "http://localhost:7777/")
        self.end_headers()

    def do_GET(self):
        self.send_response(200, "OK")
        self.end_headers()
        self.wfile.write(b"ok\n")


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
def test_TooManyRedirects_on_HEAD(app, monkeypatch):
    import requests.sessions

    monkeypatch.setattr(requests.sessions, "DEFAULT_REDIRECT_LIMIT", 5)

    with http_server(InfiniteRedirectOnHeadHandler):
        app.build()

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        content = json.load(fp)
    assert content == {
        "code": 0,
        "status": "working",
        "filename": "index.rst",
        "lineno": 1,
        "uri": "http://localhost:7777/",
        "info": "",
    }
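

# Builds a handler that answers each HEAD request with the next
# (status, retry_after) pair from responses, sending a Retry-After header when
# one is given.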
def make_retry_after_handler(responses):
    class RetryAfterHandler(http.server.BaseHTTPRequestHandler):
        def do_HEAD(self):
            status, retry_after = responses.pop(0)
            self.send_response(status)
            if retry_after:
                self.send_header('Retry-After', retry_after)
            self.end_headers()

        def log_date_time_string(self):
            """Strip date and time from logged messages for assertions."""
            return ""

    return RetryAfterHandler


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
def test_too_many_requests_retry_after_int_delay(app, capsys, status):
    with http_server(make_retry_after_handler([(429, "0"), (200, None)])), \
         mock.patch("sphinx.builders.linkcheck.DEFAULT_DELAY", 0), \
         mock.patch("sphinx.builders.linkcheck.QUEUE_POLL_SECS", 0.01):
        app.build()
    content = (app.outdir / 'output.json').read_text(encoding='utf8')
    assert json.loads(content) == {
        "filename": "index.rst",
        "lineno": 1,
        "status": "working",
        "code": 0,
        "uri": "http://localhost:7777/",
        "info": "",
    }
    rate_limit_log = "-rate limited- http://localhost:7777/ | sleeping...\n"
    assert rate_limit_log in strip_colors(status.getvalue())
    _stdout, stderr = capsys.readouterr()
    assert stderr == textwrap.dedent(
        """\
        127.0.0.1 - - [] "HEAD / HTTP/1.1" 429 -
        127.0.0.1 - - [] "HEAD / HTTP/1.1" 200 -
        """,
    )


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
def test_too_many_requests_retry_after_HTTP_date(app, capsys):
    now = datetime.now().timetuple()
    retry_after = wsgiref.handlers.format_date_time(time.mktime(now))
    with http_server(make_retry_after_handler([(429, retry_after), (200, None)])):
        app.build()
    content = (app.outdir / 'output.json').read_text(encoding='utf8')
    assert json.loads(content) == {
        "filename": "index.rst",
        "lineno": 1,
        "status": "working",
        "code": 0,
        "uri": "http://localhost:7777/",
        "info": "",
    }
    _stdout, stderr = capsys.readouterr()
    assert stderr == textwrap.dedent(
        """\
        127.0.0.1 - - [] "HEAD / HTTP/1.1" 429 -
        127.0.0.1 - - [] "HEAD / HTTP/1.1" 200 -
        """,
    )


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
def test_too_many_requests_retry_after_without_header(app, capsys):
    with http_server(make_retry_after_handler([(429, None), (200, None)])), \
         mock.patch("sphinx.builders.linkcheck.DEFAULT_DELAY", 0):
        app.build()
    content = (app.outdir / 'output.json').read_text(encoding='utf8')
    assert json.loads(content) == {
        "filename": "index.rst",
        "lineno": 1,
        "status": "working",
        "code": 0,
        "uri": "http://localhost:7777/",
        "info": "",
    }
    _stdout, stderr = capsys.readouterr()
    assert stderr == textwrap.dedent(
        """\
        127.0.0.1 - - [] "HEAD / HTTP/1.1" 429 -
        127.0.0.1 - - [] "HEAD / HTTP/1.1" 200 -
        """,
    )


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
def test_too_many_requests_user_timeout(app):
    app.config.linkcheck_rate_limit_timeout = 0.0
    with http_server(make_retry_after_handler([(429, None)])):
        app.build()
    content = (app.outdir / 'output.json').read_text(encoding='utf8')
    assert json.loads(content) == {
        "filename": "index.rst",
        "lineno": 1,
        "status": "broken",
        "code": 0,
        "uri": "http://localhost:7777/",
        "info": "429 Client Error: Too Many Requests for url: http://localhost:7777/",
    }
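

# Minimal stand-in for a response object: just the url and (empty) headers
# that the limit_rate() tests below read.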
class FakeResponse:
    headers: dict[str, str] = {}
    url = "http://localhost/"
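

# The limit_rate() tests below drive HyperlinkAvailabilityCheckWorker directly,
# without running a build, using FakeResponse for the URL and Retry-After data.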
def test_limit_rate_default_sleep(app):
    worker = HyperlinkAvailabilityCheckWorker(app.env, app.config, Queue(), Queue(), {})
    with mock.patch('time.time', return_value=0.0):
        next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
    assert next_check == 60.0


def test_limit_rate_user_max_delay(app):
    app.config.linkcheck_rate_limit_timeout = 0.0
    worker = HyperlinkAvailabilityCheckWorker(app.env, app.config, Queue(), Queue(), {})
    next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
    assert next_check is None


def test_limit_rate_doubles_previous_wait_time(app):
    rate_limits = {"localhost": RateLimit(60.0, 0.0)}
    worker = HyperlinkAvailabilityCheckWorker(app.env, app.config, Queue(), Queue(),
                                              rate_limits)
    with mock.patch('time.time', return_value=0.0):
        next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
    assert next_check == 120.0


def test_limit_rate_clips_wait_time_to_max_time(app):
    app.config.linkcheck_rate_limit_timeout = 90.0
    rate_limits = {"localhost": RateLimit(60.0, 0.0)}
    worker = HyperlinkAvailabilityCheckWorker(app.env, app.config, Queue(), Queue(),
                                              rate_limits)
    with mock.patch('time.time', return_value=0.0):
        next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
    assert next_check == 90.0


def test_limit_rate_bails_out_after_waiting_max_time(app):
    app.config.linkcheck_rate_limit_timeout = 90.0
    rate_limits = {"localhost": RateLimit(90.0, 0.0)}
    worker = HyperlinkAvailabilityCheckWorker(app.env, app.config, Queue(), Queue(),
                                              rate_limits)
    next_check = worker.limit_rate(FakeResponse.url, FakeResponse.headers.get("Retry-After"))
    assert next_check is None
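

# Closes the connection on HEAD without replying; GET succeeds, so the checker
# should retry with GET and report the link as working.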
class ConnectionResetHandler(http.server.BaseHTTPRequestHandler):
    def do_HEAD(self):
        self.connection.close()

    def do_GET(self):
        self.send_response(200, "OK")
        self.end_headers()


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
def test_get_after_head_raises_connection_error(app):
    with http_server(ConnectionResetHandler):
        app.build()
    content = (app.outdir / 'output.txt').read_text(encoding='utf8')
    assert not content
    content = (app.outdir / 'output.json').read_text(encoding='utf8')
    assert json.loads(content) == {
        "filename": "index.rst",
        "lineno": 1,
        "status": "working",
        "code": 0,
        "uri": "http://localhost:7777/",
        "info": "",
    }


@pytest.mark.sphinx('linkcheck', testroot='linkcheck-documents_exclude', freshenv=True)
def test_linkcheck_exclude_documents(app):
    with http_server(DefaultsHandler):
        app.build()

    with open(app.outdir / 'output.json', encoding='utf-8') as fp:
        content = [json.loads(record) for record in fp]

    assert content == [
        {
            'filename': 'broken_link.rst',
            'lineno': 4,
            'status': 'ignored',
            'code': 0,
            'uri': 'https://www.sphinx-doc.org/this-is-a-broken-link',
            'info': 'broken_link matched ^broken_link$ from linkcheck_exclude_documents',
        },
        {
            'filename': 'br0ken_link.rst',
            'lineno': 4,
            'status': 'ignored',
            'code': 0,
            'uri': 'https://www.sphinx-doc.org/this-is-another-broken-link',
            'info': 'br0ken_link matched br[0-9]ken_link from linkcheck_exclude_documents',
        },
    ]