mirror of https://github.com/sphinx-doc/sphinx.git

html: refactor with progress_message()

commit 231bde7461 (parent afefeebb52)
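The change replaces hand-rolled progress logging (a logger.info(bold(__('... ')), nonl=True) call before the work and logger.info(__('done')) after it) with sphinx.util.progress_message, which the hunks below use both as a decorator and as a context manager. As a rough sketch of that pattern only, inferred from how the helper is used in this diff rather than from Sphinx's actual implementation, such a helper can be built on contextlib.ContextDecorator:

# Illustrative sketch only: mimics the behaviour implied by the diff
# (log "message...", run the wrapped code, then log "done" or "failed").
import logging
from contextlib import ContextDecorator

logger = logging.getLogger(__name__)


class progress_message(ContextDecorator):
    """Usable as ``@progress_message('...')`` or ``with progress_message('...'):``."""

    def __init__(self, message: str) -> None:
        self.message = message

    def __enter__(self) -> None:
        # announce the task before the wrapped work starts
        logger.info('%s... ', self.message)

    def __exit__(self, exc_type, exc_value, traceback) -> bool:
        # report the outcome once the wrapped work finishes
        logger.info('failed' if exc_type else 'done')
        return False  # do not swallow exceptions; callers still handle them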
@@ -38,8 +38,7 @@ from sphinx.highlighting import PygmentsBridge
 from sphinx.locale import _, __
 from sphinx.search import js_index
 from sphinx.theming import HTMLThemeFactory
-from sphinx.util import logging, status_iterator
-from sphinx.util.console import bold  # type: ignore
+from sphinx.util import logging, progress_message, status_iterator
 from sphinx.util.docutils import is_html5_writer_available, new_document
 from sphinx.util.fileutil import copy_asset
 from sphinx.util.i18n import format_date
@@ -626,6 +625,7 @@ class StandaloneHTMLBuilder(Builder):
 
     def finish(self) -> None:
         self.finish_tasks.add_task(self.gen_indices)
+        self.finish_tasks.add_task(self.gen_pages_from_extensions)
         self.finish_tasks.add_task(self.gen_additional_pages)
         self.finish_tasks.add_task(self.copy_image_files)
         self.finish_tasks.add_task(self.copy_download_files)
@@ -636,9 +636,8 @@ class StandaloneHTMLBuilder(Builder):
         # dump the search index
         self.handle_finish()
 
+    @progress_message(__('generating indices'))
     def gen_indices(self) -> None:
-        logger.info(bold(__('generating indices...')), nonl=True)
-
         # the global general index
         if self.use_index:
             self.write_genindex()
@@ -646,16 +645,14 @@ class StandaloneHTMLBuilder(Builder):
         # the global domain-specific indices
         self.write_domain_indices()
 
-        logger.info('')
-
-    def gen_additional_pages(self) -> None:
+    def gen_pages_from_extensions(self) -> None:
         # pages from extensions
         for pagelist in self.events.emit('html-collect-pages'):
             for pagename, context, template in pagelist:
                 self.handle_page(pagename, context, template)
 
-        logger.info(bold(__('writing additional pages...')), nonl=True)
-
+    @progress_message(__('writing additional pages'))
+    def gen_additional_pages(self) -> None:
         # additional pages from conf.py
         for pagename, template in self.config.html_additional_pages.items():
             logger.info(' ' + pagename, nonl=True)
@@ -672,8 +669,6 @@ class StandaloneHTMLBuilder(Builder):
             fn = path.join(self.outdir, '_static', 'opensearch.xml')
             self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)
 
-        logger.info('')
-
     def write_genindex(self) -> None:
         # the total count of lines for each index letter, used to distribute
         # the entries into two columns
@@ -749,63 +744,60 @@ class StandaloneHTMLBuilder(Builder):
     def copy_static_files(self) -> None:
         try:
             # copy static files
-            logger.info(bold(__('copying static files... ')), nonl=True)
-            ensuredir(path.join(self.outdir, '_static'))
-            # first, create pygments style file
-            with open(path.join(self.outdir, '_static', 'pygments.css'), 'w') as f:
-                f.write(self.highlighter.get_stylesheet())
-            # then, copy translations JavaScript file
-            if self.config.language is not None:
-                jsfile = self._get_translations_js()
-                if jsfile:
-                    copyfile(jsfile, path.join(self.outdir, '_static',
-                                               'translations.js'))
-
-            # copy non-minified stemmer JavaScript file
-            if self.indexer is not None:
-                jsfile = self.indexer.get_js_stemmer_rawcode()
-                if jsfile:
-                    copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))
-
-            ctx = self.globalcontext.copy()
-
-            # add context items for search function used in searchtools.js_t
-            if self.indexer is not None:
-                ctx.update(self.indexer.context_for_searchtool())
-
-            # then, copy over theme-supplied static files
-            if self.theme:
-                for theme_path in self.theme.get_theme_dirs()[::-1]:
-                    entry = path.join(theme_path, 'static')
-                    copy_asset(entry, path.join(self.outdir, '_static'), excluded=DOTFILES,
-                               context=ctx, renderer=self.templates)
-            # then, copy over all user-supplied static files
-            excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
-            for static_path in self.config.html_static_path:
-                entry = path.join(self.confdir, static_path)
-                copy_asset(entry, path.join(self.outdir, '_static'), excluded,
-                           context=ctx, renderer=self.templates)
-            # copy logo and favicon files if not already in static path
-            if self.config.html_logo:
-                entry = path.join(self.confdir, self.config.html_logo)
-                copy_asset(entry, path.join(self.outdir, '_static'))
-            if self.config.html_favicon:
-                entry = path.join(self.confdir, self.config.html_favicon)
-                copy_asset(entry, path.join(self.outdir, '_static'))
-            logger.info(__('done'))
+            with progress_message(__('copying static files... ')):
+                ensuredir(path.join(self.outdir, '_static'))
+                # first, create pygments style file
+                with open(path.join(self.outdir, '_static', 'pygments.css'), 'w') as f:
+                    f.write(self.highlighter.get_stylesheet())
+                # then, copy translations JavaScript file
+                if self.config.language is not None:
+                    jsfile = self._get_translations_js()
+                    if jsfile:
+                        copyfile(jsfile, path.join(self.outdir, '_static',
+                                                   'translations.js'))
+
+                # copy non-minified stemmer JavaScript file
+                if self.indexer is not None:
+                    jsfile = self.indexer.get_js_stemmer_rawcode()
+                    if jsfile:
+                        copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))
+
+                ctx = self.globalcontext.copy()
+
+                # add context items for search function used in searchtools.js_t
+                if self.indexer is not None:
+                    ctx.update(self.indexer.context_for_searchtool())
+
+                # then, copy over theme-supplied static files
+                if self.theme:
+                    for theme_path in self.theme.get_theme_dirs()[::-1]:
+                        entry = path.join(theme_path, 'static')
+                        copy_asset(entry, path.join(self.outdir, '_static'), excluded=DOTFILES,
+                                   context=ctx, renderer=self.templates)
+                # then, copy over all user-supplied static files
+                excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
+                for static_path in self.config.html_static_path:
+                    entry = path.join(self.confdir, static_path)
+                    copy_asset(entry, path.join(self.outdir, '_static'), excluded,
+                               context=ctx, renderer=self.templates)
+                # copy logo and favicon files if not already in static path
+                if self.config.html_logo:
+                    entry = path.join(self.confdir, self.config.html_logo)
+                    copy_asset(entry, path.join(self.outdir, '_static'))
+                if self.config.html_favicon:
+                    entry = path.join(self.confdir, self.config.html_favicon)
+                    copy_asset(entry, path.join(self.outdir, '_static'))
         except OSError as err:
             logger.warning(__('cannot copy static file %r'), err)
 
     def copy_extra_files(self) -> None:
+        """copy html_extra_path files."""
        try:
-            # copy html_extra_path files
-            logger.info(bold(__('copying extra files... ')), nonl=True)
-            excluded = Matcher(self.config.exclude_patterns)
-
-            for extra_path in self.config.html_extra_path:
-                entry = path.join(self.confdir, extra_path)
-                copy_asset(entry, self.outdir, excluded)
-            logger.info(__('done'))
+            with progress_message(__('copying extra files')):
+                excluded = Matcher(self.config.exclude_patterns)
+                for extra_path in self.config.html_extra_path:
+                    entry = path.join(self.confdir, extra_path)
+                    copy_asset(entry, self.outdir, excluded)
         except OSError as err:
             logger.warning(__('cannot copy extra file %r'), err)
@@ -1049,27 +1041,23 @@ class StandaloneHTMLBuilder(Builder):
         self.finish_tasks.add_task(self.dump_search_index)
         self.finish_tasks.add_task(self.dump_inventory)
 
+    @progress_message(__('dumping object inventory'))
     def dump_inventory(self) -> None:
-        logger.info(bold(__('dumping object inventory... ')), nonl=True)
         InventoryFile.dump(path.join(self.outdir, INVENTORY_FILENAME), self.env, self)
-        logger.info(__('done'))
 
     def dump_search_index(self) -> None:
-        logger.info(
-            bold(__('dumping search index in %s ... ') % self.indexer.label()),
-            nonl=True)
-        self.indexer.prune(self.env.all_docs)
-        searchindexfn = path.join(self.outdir, self.searchindex_filename)
-        # first write to a temporary file, so that if dumping fails,
-        # the existing index won't be overwritten
-        if self.indexer_dumps_unicode:
-            with open(searchindexfn + '.tmp', 'w', encoding='utf-8') as ft:
-                self.indexer.dump(ft, self.indexer_format)
-        else:
-            with open(searchindexfn + '.tmp', 'wb') as fb:
-                self.indexer.dump(fb, self.indexer_format)
-        movefile(searchindexfn + '.tmp', searchindexfn)
-        logger.info(__('done'))
+        with progress_message(__('dumping search index in %s') % self.indexer.label()):
+            self.indexer.prune(self.env.all_docs)
+            searchindexfn = path.join(self.outdir, self.searchindex_filename)
+            # first write to a temporary file, so that if dumping fails,
+            # the existing index won't be overwritten
+            if self.indexer_dumps_unicode:
+                with open(searchindexfn + '.tmp', 'w', encoding='utf-8') as ft:
+                    self.indexer.dump(ft, self.indexer_format)
+            else:
+                with open(searchindexfn + '.tmp', 'wb') as fb:
+                    self.indexer.dump(fb, self.indexer_format)
+            movefile(searchindexfn + '.tmp', searchindexfn)
 
 
 def convert_html_css_files(app: Sphinx, config: Config) -> None:
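Net effect of the hunks above, for reference: gen_indices, gen_additional_pages and dump_inventory now report progress through the decorator form, while copy_static_files, copy_extra_files and dump_search_index wrap their bodies in the context-manager form; the explicit logger.info(bold(...)) / logger.info(__('done')) pairs disappear, along with the now-unused bold import.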