Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Fix #3349: Result of `IndexBuilder.load()` is broken
This commit is contained in:
parent b8d8519c88
commit 1f973caf06
CHANGES
@@ -30,6 +30,7 @@ Bugs fixed
 * #3410: return code in :confval:`release` breaks html search
 * #3427: autodoc: memory addresses are not stripped on Windows
 * #3428: xetex build tests fail due to fontspec v2.6 defining ``\strong``
+* #3349: Result of ``IndexBuilder.load()`` is broken

 Testing
@@ -270,7 +270,7 @@ class IndexBuilder(object):
         if not isinstance(frozen, dict) or \
            frozen.get('envversion') != self.env.version:
             raise ValueError('old format')
-        index2fn = frozen['filenames']
+        index2fn = frozen['docnames']
         self._titles = dict(zip(index2fn, frozen['titles']))

         def load_terms(mapping):
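Context for the one-line fix above (an illustration added here, not part of the commit): ``freeze()`` emits ``docnames``, ``filenames`` and ``titles`` as parallel sequences, and ``load()`` rebuilds ``self._titles`` by zipping titles against one of them. A minimal standalone sketch, using only the payload shape asserted in the test below, of why zipping against ``filenames`` leaves the titles keyed by the wrong names:

# Standalone sketch (plain Python, not Sphinx code).
frozen = {
    'docnames': ('docname', 'docname2'),
    'filenames': ['filename', 'filename2'],
    'titles': ('title', 'title2'),
}

# Old behaviour: titles end up keyed by filename ...
broken = dict(zip(frozen['filenames'], frozen['titles']))
assert broken == {'filename': 'title', 'filename2': 'title2'}
assert 'docname' not in broken   # ... so lookups by docname come back empty.

# Patched behaviour: zip against docnames, as the rest of the builder expects.
fixed = dict(zip(frozen['docnames'], frozen['titles']))
assert fixed == {'docname': 'title', 'docname2': 'title2'}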
@@ -9,6 +9,9 @@
    :license: BSD, see LICENSE for details.
"""

from collections import namedtuple

from six import BytesIO
from docutils import frontend, utils
from docutils.parsers import rst
@@ -16,6 +19,17 @@ from sphinx.search import IndexBuilder
from sphinx.util import jsdump

import pytest


DummyEnvironment = namedtuple('DummyEnvironment', ['version', 'domains'])


class DummyDomain(object):
    def __init__(self, data):
        self.data = data
        self.object_types = {}

    def get_objects(self):
        return self.data


settings = parser = None
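A note on the two stand-ins above (illustration only, not part of the commit): the ``namedtuple`` supplies just the environment attributes the tests touch (``version``, which ``load()`` compares against the frozen ``envversion``, and ``domains``), while ``DummyDomain.get_objects()`` hands back pre-baked object tuples. A minimal sketch of how they plug together, with hypothetical values shaped like the ones used in ``test_IndexBuilder`` below:

from collections import namedtuple

DummyEnvironment = namedtuple('DummyEnvironment', ['version', 'domains'])


class DummyDomain(object):
    def __init__(self, data):
        self.data = data
        self.object_types = {}

    def get_objects(self):
        return self.data


# Hypothetical fixture values, mirroring the test setup.
domain = DummyDomain([('objname', 'objdispname', 'objtype', 'docname', '#anchor', 1)])
env = DummyEnvironment('1.0', {'dummy': domain})

assert env.version == '1.0'               # compared against 'envversion' when loading
assert env.domains['dummy'].get_objects()[0][3] == 'docname'   # each object carries its docname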
@@ -39,23 +53,15 @@ def is_registered_term(index, keyword):
FILE_CONTENTS = '''\
section_title
=============

.. test that comments are not indexed: boson

test that non-comments are indexed: fermion
'''


def test_wordcollector():
    doc = utils.new_document(b'test data', settings)
    doc['file'] = 'dummy'
    parser.parse(FILE_CONTENTS, doc)

    ix = IndexBuilder(None, 'en', {}, None)
    ix.feed('docname', 'filename', 'title', doc)
    assert 'boson' not in ix._mapping
    assert 'fermion' in ix._mapping


@pytest.mark.sphinx(testroot='ext-viewcode')
def test_objects_are_escaped(app, status, warning):
    app.builder.build_all()
@@ -123,3 +129,78 @@ def test_term_in_raw_directive(app, status, warning):
    assert not is_registered_term(searchindex, 'raw')
    assert is_registered_term(searchindex, 'rawword')
    assert not is_registered_term(searchindex, 'latex_keyword')

def test_IndexBuilder():
    domain = DummyDomain([('objname', 'objdispname', 'objtype', 'docname', '#anchor', 1),
                          ('objname2', 'objdispname2', 'objtype2', 'docname2', '', -1)])
    env = DummyEnvironment('1.0', {'dummy': domain})
    doc = utils.new_document(b'test data', settings)
    doc['file'] = 'dummy'
    parser.parse(FILE_CONTENTS, doc)

    # feed
    index = IndexBuilder(env, 'en', {}, None)
    index.feed('docname', 'filename', 'title', doc)
    index.feed('docname2', 'filename2', 'title2', doc)
    assert index._titles == {'docname': 'title', 'docname2': 'title2'}
    assert index._filenames == {'docname': 'filename', 'docname2': 'filename2'}
    assert index._mapping == {
        'fermion': {'docname', 'docname2'},
        'comment': {'docname', 'docname2'},
        'non': {'docname', 'docname2'},
        'index': {'docname', 'docname2'},
        'test': {'docname', 'docname2'}
    }
    assert index._title_mapping == {'section_titl': {'docname', 'docname2'}}
    assert index._objtypes == {}
    assert index._objnames == {}

    # freeze
    assert index.freeze() == {
        'docnames': ('docname', 'docname2'),
        'envversion': '1.0',
        'filenames': ['filename', 'filename2'],
        'objects': {'': {'objname': (0, 0, 1, '#anchor')}},
        'objnames': {0: ('dummy', 'objtype', 'objtype')},
        'objtypes': {0: 'dummy:objtype'},
        'terms': {'comment': [0, 1],
                  'fermion': [0, 1],
                  'index': [0, 1],
                  'non': [0, 1],
                  'test': [0, 1]},
        'titles': ('title', 'title2'),
        'titleterms': {'section_titl': [0, 1]}
    }
    assert index._objtypes == {('dummy', 'objtype'): 0}
    assert index._objnames == {0: ('dummy', 'objtype', 'objtype')}

    # dump / load
    stream = BytesIO()
    index.dump(stream, 'pickle')
    stream.seek(0)

    index2 = IndexBuilder(env, 'en', {}, None)
    index2.load(stream, 'pickle')

    assert index2._titles == index._titles
    assert index2._filenames == {}
    assert index2._mapping == index._mapping
    assert index2._title_mapping == index._title_mapping
    assert index2._objtypes == {}
    assert index2._objnames == {}

    # prune
    index.prune(['docname2'])
    assert index._titles == {'docname2': 'title2'}
    assert index._filenames == {'docname': 'filename', 'docname2': 'filename2'}
    assert index._mapping == {
        'fermion': {'docname2'},
        'comment': {'docname2'},
        'non': {'docname2'},
        'index': {'docname2'},
        'test': {'docname2'}
    }
    assert index._title_mapping == {'section_titl': {'docname2'}}
    assert index._objtypes == {('dummy', 'objtype'): 0}
    assert index._objnames == {0: ('dummy', 'objtype', 'objtype')}
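For readers new to the search index format, a short decoding of the ``freeze()`` payload asserted in ``test_IndexBuilder`` above (an illustration, not part of the commit): ``terms`` and ``titleterms`` map each stemmed word (``section_titl`` is the stemmed form of ``section_title``) to positions that appear to index into the parallel ``docnames``/``filenames``/``titles`` sequences. A minimal sketch resolving those positions:

# A trimmed copy of the frozen payload from the test above.
frozen = {
    'docnames': ('docname', 'docname2'),
    'filenames': ['filename', 'filename2'],
    'titles': ('title', 'title2'),
    'terms': {'fermion': [0, 1]},
    'titleterms': {'section_titl': [0, 1]},
}

# Resolve the positional indices: 'fermion' occurs in both documents.
assert [frozen['docnames'][i] for i in frozen['terms']['fermion']] == ['docname', 'docname2']

# The same indices also select the matching filenames and titles.
assert [frozen['filenames'][i] for i in frozen['terms']['fermion']] == ['filename', 'filename2']
assert [frozen['titles'][i] for i in frozen['terms']['fermion']] == ['title', 'title2']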
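The ``# dump / load`` block above round-trips the index through an in-memory stream, and its assertions show that ``load()`` restores only part of the state (titles and term mappings, while ``_filenames`` and the object tables stay empty). A standalone sketch of the same round-trip pattern with plain ``pickle`` and ``BytesIO``, independent of Sphinx:

import pickle
from io import BytesIO

payload = {'docnames': ('docname',), 'titles': ('title',)}

stream = BytesIO()
pickle.dump(payload, stream)   # serialize into the in-memory stream
stream.seek(0)                 # rewind before reading it back
assert pickle.load(stream) == payload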
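Finally, the ``# prune`` block above shows that ``prune(['docname2'])`` keeps only the surviving docname in the term mappings. A minimal sketch of the same effect on a term-to-docnames mapping (illustration only, matching the assertions above):

# Term -> set-of-docnames mapping before pruning, as in the test.
mapping = {'fermion': {'docname', 'docname2'},
           'comment': {'docname', 'docname2'}}

keep = {'docname2'}            # docnames that survive the prune
pruned = {term: docs & keep for term, docs in mapping.items()}

assert pruned == {'fermion': {'docname2'}, 'comment': {'docname2'}}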