Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Commit 7a40e07eda: merge with trunk

@@ -2,6 +2,7 @@
.*\.egg
.*\.so
.dir-locals.el
^\.tox
\.DS_Store$
^build/
^dist/
@@ -14,3 +15,5 @@
^env/
\.DS_Store$
~$
^utils/.*3\.py$
^distribute-

CHANGES | 75

@@ -1,6 +1,69 @@
Release 1.0 (in development)
Release 1.1 (in development)
============================

* Added Python 3.x support.


Release 1.0.2 (Aug 14, 2010)
============================

* #490: Fix cross-references to objects of types added by the
  :func:`~.Sphinx.add_object_type` API function.

* Fix handling of doc field types for different directive types.

* Allow breaking long signatures, continuing with backslash-escaped
  newlines.

* Fix unwanted styling of C domain references (because of a namespace
  clash with Pygments styles).

* Allow references to PEPs and RFCs with explicit anchors.

* #471: Fix LaTeX references to figures.

* #482: When doing a non-exact search, match only the given type
  of object.

* #481: Apply non-exact search for Python reference targets with
  ``.name`` for modules too.

* #484: Fix crash when duplicating a parameter in an info field list.

* #487: Fix setting the default role to one provided by the
  ``oldcmarkup`` extension.

* #488: Fix crash when json-py is installed, which provides a
  ``json`` module but is incompatible to simplejson.

* #480: Fix handling of target naming in intersphinx.

* #486: Fix removal of ``!`` for all cross-reference roles.


Release 1.0.1 (Jul 27, 2010)
============================

* #470: Fix generated target names for reST domain objects; they
  are not in the same namespace.

* #266: Add Bengali language.

* #473: Fix a bug in parsing JavaScript object names.

* #474: Fix building with SingleHTMLBuilder when there is no toctree.

* Fix display names for objects linked to by intersphinx with
  explicit targets.

* Fix building with the JSON builder.

* Fix hyperrefs in object descriptions for LaTeX.


Release 1.0 (Jul 23, 2010)
==========================

Incompatible changes
--------------------

@@ -15,9 +78,10 @@ Incompatible changes
- JavaScript
- reStructuredText

* The old markup for defining and linking to C directives will not work
  anymore without activating the :mod:`~sphinx.ext.oldcmarkup`
  extension.
* The old markup for defining and linking to C directives is now
  deprecated. It will not work anymore in future versions without
  activating the :mod:`~sphinx.ext.oldcmarkup` extension; in Sphinx
  1.0, it is activated by default.

* Removed support for old dependency versions; requirements are now:

@@ -80,6 +144,9 @@ Features added
- Added :confval:`html_show_copyright` config value.
- Added :confval:`latex_show_pagerefs` and :confval:`latex_show_urls`
  config values.
- The behavior of :confval:`html_file_suffix` changed slightly: the
  empty string now means "no suffix" instead of "default suffix", use
  ``None`` for "default suffix".

* New builders:

CHANGES.DasIch | 36 (new file)

@@ -0,0 +1,36 @@
Changes
=======

This file contains changes made by Daniel Neuhäuser, during the Google Summer
of Code 2010, to port Sphinx to Python 3.x. Changes are ordered descending by
date.

May 16: - Added utils/convert.py which converts entire directories of python
          files with 2to3 and names the converted files foo3.py.
        - Modified the Makefile so that in case Python 3 is used the scripts in
          utils get converted with utils/convert.py and are used instead of the
          Python 2 scripts.

May 10: Fixed a couple of tests and made several small changes.

May 9: - Removed ez_setup.py which does not work with Python 3.x and replaced
         it with distribute_setup.py
       - Use distribute (at least on 3.x) in order to run 2to3 automatically.
       - Reverted some of the changes made in revision bac40c7c924c which
         caused errors.
       - Modified tests/run.py to test against the build created by
         setup.py build in order to run the test suite with 3.x
       - Several small changes to fix 3.x compatibility.

May 1: - Removed deprecated tuple parameter unpacking.
       - Removed a pre-2.3 workaround for booleans because this creates a
         deprecation warning for 3.x, in which you can't assign values to
         booleans.
       - Moved :func:`open()` calls out of the try-blocks, which fixes revision
         c577c25bd44b.

April 30: Made :cls:`sphinx.domains.cpp.DefExpr` unhashable as described by the
          documentation because classes in 3.x don't inherit ``__hash__`` if
          they implement ``__eq__``.

April 29: Removed several deprecated function/method calls.
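
The May 16 entry above describes utils/convert.py, which runs 2to3 over a whole
directory and writes each converted foo.py as foo3.py. The script itself is not
included in this excerpt; the following is only a rough sketch of that idea using
the standard-library lib2to3 API (the function name and the hard-coded ``utils``
path are illustrative, not taken from the commit)::

    import os
    from lib2to3.refactor import RefactoringTool, get_fixers_from_package

    def convert_directory(path):
        # Apply the standard 2to3 fixers; a real script could add custom ones too.
        tool = RefactoringTool(get_fixers_from_package('lib2to3.fixes'))
        for name in os.listdir(path):
            if not name.endswith('.py') or name.endswith('3.py'):
                continue
            with open(os.path.join(path, name)) as f:
                source = f.read()
            if not source.endswith('\n'):
                source += '\n'   # refactor_string() expects a trailing newline
            tree = tool.refactor_string(source, name)
            with open(os.path.join(path, name[:-3] + '3.py'), 'w') as f:
                f.write(str(tree))

    convert_directory('utils')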

EXAMPLES | 7

@@ -12,7 +12,7 @@ interesting examples.
Documentation using the default theme
-------------------------------------

* APSW: http://apsw.googlecode.com/svn/publish/index.html
* APSW: http://apidoc.apsw.googlecode.com/hg/index.html
* ASE: https://wiki.fysik.dtu.dk/ase/
* boostmpi: http://documen.tician.de/boostmpi/
* Calibre: http://calibre.kovidgoyal.net/user_manual/
@@ -38,6 +38,7 @@ Documentation using the default theme
* PyCuda: http://documen.tician.de/pycuda/
* Pyevolve: http://pyevolve.sourceforge.net/
* Pylo: http://documen.tician.de/pylo/
* PyMQI: http://packages.python.org/pymqi/
* PyPubSub: http://pubsub.sourceforge.net/
* pyrticle: http://documen.tician.de/pyrticle/
* Python: http://docs.python.org/
@@ -96,6 +97,7 @@ Documentation using the sphinxdoc theme
* Satchmo: http://www.satchmoproject.com/docs/svn/
* Sphinx: http://sphinx.pocoo.org/
* Sqlkit: http://sqlkit.argolinux.org/
* Tau: http://www.tango-controls.org/static/tau/latest/doc/html/index.html
* Total Open Station: http://tops.berlios.de/
* WebFaction: http://docs.webfaction.com/

@@ -109,6 +111,8 @@ Documentation using another builtin theme
* pip: http://pip.openplans.org/ (nature)
* Programmieren mit PyGTK und Glade (German):
  http://www.florian-diesch.de/doc/python-und-glade/online/ (agogo)
* Spring Python: http://springpython.webfactional.com/current/sphinx/index.html
  (nature)
* sqlparse: http://python-sqlparse.googlecode.com/svn/docs/api/index.html
  (agogo)
* libLAS: http://liblas.org/ (nature)
@@ -139,6 +143,7 @@ Documentation using a custom theme/integrated in a site
* Self: http://selflanguage.org/
* SQLAlchemy: http://www.sqlalchemy.org/docs/
* tinyTiM: http://tinytim.sourceforge.net/docs/2.0/
* tipfy: http://www.tipfy.org/docs/
* Werkzeug: http://werkzeug.pocoo.org/documentation/dev/
* WFront: http://discorporate.us/projects/WFront/

@@ -7,7 +7,7 @@ include TODO

include babel.cfg
include Makefile
include ez_setup.py
include distribute_setup.py
include sphinx-autogen.py
include sphinx-build.py
include sphinx-quickstart.py

Makefile | 52

@@ -1,35 +1,63 @@
PYTHON ?= python

export PYTHONPATH = $(shell echo "$$PYTHONPATH"):./sphinx
.PHONY: all check clean clean-pyc clean-patchfiles clean-backupfiles \
        clean-generated pylint reindent test covertest build convert-utils

.PHONY: all check clean clean-pyc clean-patchfiles pylint reindent test
DONT_CHECK = -i build -i dist -i sphinx/style/jquery.js \
             -i sphinx/pycode/pgen2 -i sphinx/util/smartypants.py \
             -i .ropeproject -i doc/_build -i tests/path.py \
             -i tests/coverage.py -i env -i utils/convert.py \
             -i utils/reindent3.py -i utils/check_sources3.py -i .tox

all: clean-pyc check test
all: clean-pyc clean-backupfiles check test

ifeq ($(PYTHON), python3)
check: convert-utils
	@$(PYTHON) utils/check_sources3.py $(DONT_CHECK) .
else
check:
	@$(PYTHON) utils/check_sources.py -i build -i dist -i sphinx/style/jquery.js \
		-i sphinx/pycode/pgen2 -i sphinx/util/smartypants.py -i .ropeproject \
		-i doc/_build -i ez_setup.py -i tests/path.py -i tests/coverage.py -i env .
	@$(PYTHON) utils/check_sources.py $(DONT_CHECK) .
endif

clean: clean-pyc clean-patchfiles
clean: clean-pyc clean-patchfiles clean-backupfiles clean-generated

clean-pyc:
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +

clean-patchfiles:
	find . -name '*.orig' -exec rm -f {} +
	find . -name '*.rej' -exec rm -f {} +

clean-backupfiles:
	find . -name '*~' -exec rm -f {} +
	find . -name '*.bak' -exec rm -f {} +

clean-generated:
	rm -f utils/*3.py*

pylint:
	@pylint --rcfile utils/pylintrc sphinx

ifeq ($(PYTHON), python3)
reindent: convert-utils
	@$(PYTHON) utils/reindent3.py -r -n .
else
reindent:
	@$(PYTHON) utils/reindent.py -r -B .
	@$(PYTHON) utils/reindent.py -r -n .
endif

test:
test: build
	@cd tests; $(PYTHON) run.py -d -m '^[tT]est' $(TEST)

covertest:
	@cd tests; $(PYTHON) run.py -d -m '^[tT]est' --with-coverage --cover-package=sphinx $(TEST)
covertest: build
	@cd tests; $(PYTHON) run.py -d -m '^[tT]est' --with-coverage \
		--cover-package=sphinx $(TEST)

build:
	@$(PYTHON) setup.py build

ifeq ($(PYTHON), python3)
convert-utils:
	@python3 utils/convert.py -i utils/convert.py utils/
endif

README | 12

@@ -26,6 +26,18 @@ Then, direct your browser to ``_build/html/index.html``.
Or read them online at <http://sphinx.pocoo.org/>.


Testing
=======

To run the tests with the interpreter available as ``python``, use::

    make test

If you want to use a different interpreter, e.g. ``python3``, use::

    PYTHON=python3 make test


Contributing
============

custom_fixers/__init__.py | 0 (new, empty file)

custom_fixers/fix_alt_unicode.py | 12 (new file)

@@ -0,0 +1,12 @@
from lib2to3.fixer_base import BaseFix
from lib2to3.fixer_util import Name

class FixAltUnicode(BaseFix):
    PATTERN = """
    func=funcdef< 'def' name='__unicode__'
                  parameters< '(' NAME ')' > any+ >
    """

    def transform(self, node, results):
        name = results['name']
        name.replace(Name('__str__', prefix=name.prefix))
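
The fixer above is a lib2to3 fixer: its PATTERN matches a ``def __unicode__(self)``
method and transform() renames it to ``__str__`` during the 2to3 conversion. As an
illustration only (not part of the commit), it could be exercised like this, assuming
the ``custom_fixers`` package added here is importable::

    from lib2to3.refactor import RefactoringTool

    source = (
        'class Page(object):\n'
        '    def __unicode__(self):\n'
        '        return self.title\n'
    )

    # Load only the custom fixer by its module path and run it over the snippet.
    tool = RefactoringTool(['custom_fixers.fix_alt_unicode'])
    print(tool.refactor_string(source, '<example>'))   # the method is now __str__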

distribute_setup.py | 485 (new file)

@@ -0,0 +1,485 @@
#!python
"""Bootstrap distribute installation

If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::

    from distribute_setup import use_setuptools
    use_setuptools()

If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.

This file can also be run as a script to install or upgrade setuptools.
"""
import os
import sys
import time
import fnmatch
import tempfile
import tarfile
from distutils import log

try:
    from site import USER_SITE
except ImportError:
    USER_SITE = None

try:
    import subprocess

    def _python_cmd(*args):
        args = (sys.executable,) + args
        return subprocess.call(args) == 0

except ImportError:
    # will be used for python 2.3
    def _python_cmd(*args):
        args = (sys.executable,) + args
        # quoting arguments if windows
        if sys.platform == 'win32':
            def quote(arg):
                if ' ' in arg:
                    return '"%s"' % arg
                return arg
            args = [quote(arg) for arg in args]
        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0

DEFAULT_VERSION = "0.6.13"
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
SETUPTOOLS_FAKED_VERSION = "0.6c11"

SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION


def _install(tarball):
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # installing
        log.warn('Installing Distribute')
        if not _python_cmd('setup.py', 'install'):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
    finally:
        os.chdir(old_wd)


def _build_egg(egg, tarball, to_dir):
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # building an egg
        log.warn('Building a Distribute egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)

    finally:
        os.chdir(old_wd)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')


def _do_download(version, download_base, to_dir, download_delay):
    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        tarball = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, tarball, to_dir)
    sys.path.insert(0, egg)
    import setuptools
    setuptools.bootstrap_install_from = egg


def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15, no_fake=True):
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    was_imported = 'pkg_resources' in sys.modules or \
        'setuptools' in sys.modules
    try:
        try:
            import pkg_resources
            if not hasattr(pkg_resources, '_distribute'):
                if not no_fake:
                    _fake_setuptools()
                raise ImportError
        except ImportError:
            return _do_download(version, download_base, to_dir, download_delay)
        try:
            pkg_resources.require("distribute>="+version)
            return
        except pkg_resources.VersionConflict:
            e = sys.exc_info()[1]
            if was_imported:
                sys.stderr.write(
                "The required version of distribute (>=%s) is not available,\n"
                "and can't be installed while this script is running. Please\n"
                "install a more recent version first, using\n"
                "'easy_install -U distribute'."
                "\n\n(Currently using %r)\n" % (version, e.args[0]))
                sys.exit(2)
            else:
                del pkg_resources, sys.modules['pkg_resources']  # reload ok
                return _do_download(version, download_base, to_dir,
                                    download_delay)
        except pkg_resources.DistributionNotFound:
            return _do_download(version, download_base, to_dir,
                                download_delay)
    finally:
        if not no_fake:
            _create_fake_setuptools_pkg_info(to_dir)


def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15):
    """Download distribute from a specified location and return its filename

    `version` should be a valid distribute version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    tgz_name = "distribute-%s.tar.gz" % version
    url = download_base + tgz_name
    saveto = os.path.join(to_dir, tgz_name)
    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            log.warn("Downloading %s", url)
            src = urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = src.read()
            dst = open(saveto, "wb")
            dst.write(data)
        finally:
            if src:
                src.close()
            if dst:
                dst.close()
    return os.path.realpath(saveto)


def _no_sandbox(function):
    def __no_sandbox(*args, **kw):
        try:
            from setuptools.sandbox import DirectorySandbox
            if not hasattr(DirectorySandbox, '_old'):
                def violation(*args):
                    pass
                DirectorySandbox._old = DirectorySandbox._violation
                DirectorySandbox._violation = violation
                patched = True
            else:
                patched = False
        except ImportError:
            patched = False

        try:
            return function(*args, **kw)
        finally:
            if patched:
                DirectorySandbox._violation = DirectorySandbox._old
                del DirectorySandbox._old

    return __no_sandbox


def _patch_file(path, content):
    """Will backup the file then patch it"""
    existing_content = open(path).read()
    if existing_content == content:
        # already patched
        log.warn('Already patched.')
        return False
    log.warn('Patching...')
    _rename_path(path)
    f = open(path, 'w')
    try:
        f.write(content)
    finally:
        f.close()
    return True

_patch_file = _no_sandbox(_patch_file)


def _same_content(path, content):
    return open(path).read() == content


def _rename_path(path):
    new_name = path + '.OLD.%s' % time.time()
    log.warn('Renaming %s into %s', path, new_name)
    os.rename(path, new_name)
    return new_name


def _remove_flat_installation(placeholder):
    if not os.path.isdir(placeholder):
        log.warn('Unkown installation at %s', placeholder)
        return False
    found = False
    for file in os.listdir(placeholder):
        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
            found = True
            break
    if not found:
        log.warn('Could not locate setuptools*.egg-info')
        return

    log.warn('Removing elements out of the way...')
    pkg_info = os.path.join(placeholder, file)
    if os.path.isdir(pkg_info):
        patched = _patch_egg_dir(pkg_info)
    else:
        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)

    if not patched:
        log.warn('%s already patched.', pkg_info)
        return False
    # now let's move the files out of the way
    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
        element = os.path.join(placeholder, element)
        if os.path.exists(element):
            _rename_path(element)
        else:
            log.warn('Could not find the %s element of the '
                     'Setuptools distribution', element)
    return True

_remove_flat_installation = _no_sandbox(_remove_flat_installation)


def _after_install(dist):
    log.warn('After install bootstrap.')
    placeholder = dist.get_command_obj('install').install_purelib
    _create_fake_setuptools_pkg_info(placeholder)


def _create_fake_setuptools_pkg_info(placeholder):
    if not placeholder or not os.path.exists(placeholder):
        log.warn('Could not find the install location')
        return
    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
            (SETUPTOOLS_FAKED_VERSION, pyver)
    pkg_info = os.path.join(placeholder, setuptools_file)
    if os.path.exists(pkg_info):
        log.warn('%s already exists', pkg_info)
        return

    log.warn('Creating %s', pkg_info)
    f = open(pkg_info, 'w')
    try:
        f.write(SETUPTOOLS_PKG_INFO)
    finally:
        f.close()

    pth_file = os.path.join(placeholder, 'setuptools.pth')
    log.warn('Creating %s', pth_file)
    f = open(pth_file, 'w')
    try:
        f.write(os.path.join(os.curdir, setuptools_file))
    finally:
        f.close()

_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)


def _patch_egg_dir(path):
    # let's check if it's already patched
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    if os.path.exists(pkg_info):
        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
            log.warn('%s already patched.', pkg_info)
            return False
    _rename_path(path)
    os.mkdir(path)
    os.mkdir(os.path.join(path, 'EGG-INFO'))
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    f = open(pkg_info, 'w')
    try:
        f.write(SETUPTOOLS_PKG_INFO)
    finally:
        f.close()
    return True

_patch_egg_dir = _no_sandbox(_patch_egg_dir)


def _before_install():
    log.warn('Before install bootstrap.')
    _fake_setuptools()


def _under_prefix(location):
    if 'install' not in sys.argv:
        return True
    args = sys.argv[sys.argv.index('install')+1:]
    for index, arg in enumerate(args):
        for option in ('--root', '--prefix'):
            if arg.startswith('%s=' % option):
                top_dir = arg.split('root=')[-1]
                return location.startswith(top_dir)
            elif arg == option:
                if len(args) > index:
                    top_dir = args[index+1]
                    return location.startswith(top_dir)
        if arg == '--user' and USER_SITE is not None:
            return location.startswith(USER_SITE)
    return True


def _fake_setuptools():
    log.warn('Scanning installed packages')
    try:
        import pkg_resources
    except ImportError:
        # we're cool
        log.warn('Setuptools or Distribute does not seem to be installed.')
        return
    ws = pkg_resources.working_set
    try:
        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
                                  replacement=False))
    except TypeError:
        # old distribute API
        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))

    if setuptools_dist is None:
        log.warn('No setuptools distribution found')
        return
    # detecting if it was already faked
    setuptools_location = setuptools_dist.location
    log.warn('Setuptools installation detected at %s', setuptools_location)

    # if --root or --preix was provided, and if
    # setuptools is not located in them, we don't patch it
    if not _under_prefix(setuptools_location):
        log.warn('Not patching, --root or --prefix is installing Distribute'
                 ' in another location')
        return

    # let's see if its an egg
    if not setuptools_location.endswith('.egg'):
        log.warn('Non-egg installation')
        res = _remove_flat_installation(setuptools_location)
        if not res:
            return
    else:
        log.warn('Egg installation')
        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
        if (os.path.exists(pkg_info) and
            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
            log.warn('Already patched.')
            return
        log.warn('Patching...')
        # let's create a fake egg replacing setuptools one
        res = _patch_egg_dir(setuptools_location)
        if not res:
            return
    log.warn('Patched done.')
    _relaunch()


def _relaunch():
    log.warn('Relaunching...')
    # we have to relaunch the process
    # pip marker to avoid a relaunch bug
    if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
        sys.argv[0] = 'setup.py'
    args = [sys.executable] + sys.argv
    sys.exit(subprocess.call(args))


def _extractall(self, path=".", members=None):
    """Extract all members from the archive to the current working
       directory and set owner, modification time and permissions on
       directories afterwards. `path' specifies a different directory
       to extract to. `members' is optional and must be a subset of the
       list returned by getmembers().
    """
    import copy
    import operator
    from tarfile import ExtractError
    directories = []

    if members is None:
        members = self

    for tarinfo in members:
        if tarinfo.isdir():
            # Extract directories with a safe mode.
            directories.append(tarinfo)
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 448  # decimal for oct 0700
        self.extract(tarinfo, path)

    # Reverse sort directories.
    if sys.version_info < (2, 4):
        def sorter(dir1, dir2):
            return cmp(dir1.name, dir2.name)
        directories.sort(sorter)
        directories.reverse()
    else:
        directories.sort(key=operator.attrgetter('name'), reverse=True)

    # Set correct owner, mtime and filemode on directories.
    for tarinfo in directories:
        dirpath = os.path.join(path, tarinfo.name)
        try:
            self.chown(tarinfo, dirpath)
            self.utime(tarinfo, dirpath)
            self.chmod(tarinfo, dirpath)
        except ExtractError:
            e = sys.exc_info()[1]
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)


def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    tarball = download_setuptools()
    _install(tarball)


if __name__ == '__main__':
    main(sys.argv[1:])

doc/_templates/indexsidebar.html | 2 (vendored)

@@ -23,6 +23,6 @@ are also available.</p>
<input type="text" name="email" value="your@email"/>
<input type="submit" name="sub" value="Subscribe" />
</form>
<p>or come to the <tt>#python-docs</tt> channel on FreeNode.</p>
<p>or come to the <tt>#pocoo</tt> channel on FreeNode.</p>
<p>You can also open an issue at the
<a href="http://www.bitbucket.org/birkenfeld/sphinx/issues/">tracker</a>.</p>

@@ -267,11 +267,11 @@ All serialization builders outputs one file per source file and a few special
files. They also copy the reST source files in the directory ``_sources``
under the output directory.

The :class:`PickleHTMLBuilder` is a builtin subclass that implements the pickle
The :class:`.PickleHTMLBuilder` is a builtin subclass that implements the pickle
serialization interface.

The files per source file have the extensions of
:attr:`~SerializingHTMLBuilder.out_suffix`, and are arranged in directories
:attr:`~.SerializingHTMLBuilder.out_suffix`, and are arranged in directories
just as the source files are. They unserialize to a dictionary (or dictionary
like structure) with these keys:

@@ -302,7 +302,7 @@ like structure) with these keys:

The special files are located in the root output directory. They are:

:attr:`SerializingHTMLBuilder.globalcontext_filename`
:attr:`.SerializingHTMLBuilder.globalcontext_filename`
   A pickled dict with these keys:

   ``project``, ``copyright``, ``release``, ``version``
@@ -321,7 +321,7 @@ The special files are located in the root output directory. They are:
   ``titles``
      A dictionary of all documents' titles, as HTML strings.

:attr:`SerializingHTMLBuilder.searchindex_filename`
:attr:`.SerializingHTMLBuilder.searchindex_filename`
   An index that can be used for searching the documentation. It is a pickled
   list with these entries:
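
The doc/builders.rst excerpt above describes the pickle serialization builder,
whose output files unserialize to plain dictionaries. As a small illustration
(not taken from the commit), they can be inspected with nothing but the standard
library; the ``.fpickle`` suffix and ``globalcontext.pickle`` name assume the
builder's defaults, and the ``_build/pickle`` path is just an example::

    import pickle

    # Global information written once per build.
    with open('_build/pickle/globalcontext.pickle', 'rb') as f:
        ctx = pickle.load(f)
    print(ctx['project'], ctx['release'])

    # One pickled dictionary per source document.
    with open('_build/pickle/index.fpickle', 'rb') as f:
        doc = pickle.load(f)
    print(sorted(doc.keys()))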

@@ -34,9 +34,9 @@ epub_author = 'Georg Brandl'
epub_publisher = 'http://sphinx.pocoo.org/'
epub_scheme = 'url'
epub_identifier = epub_publisher
epub_pre_files = [('index', 'Welcome')]
epub_pre_files = [('index.html', 'Welcome')]
epub_exclude_files = ['_static/opensearch.xml', '_static/doctools.js',
                      '_static/jquery.js', '_static/searchtools.js',
                      '_static/jquery.js', '_static/searchtools.js', '_static/underscore.js',
                      '_static/basic.css', 'search.html']

latex_documents = [('contents', 'sphinx.tex', 'Sphinx Documentation',
@@ -64,6 +64,10 @@ man_pages = [
              'template generator', '', 1),
]

# We're not using intersphinx right now, but if we did, this would be part of
# the mapping:
intersphinx_mapping = {'python': ('http://docs.python.org/dev', None)}


# -- Extension interface -------------------------------------------------------

@@ -283,6 +283,7 @@ Project information

   Currently supported languages are:

   * ``bn`` -- Bengali
   * ``ca`` -- Catalan
   * ``cs`` -- Czech
   * ``da`` -- Danish
@@ -345,12 +346,12 @@ Project information

   A boolean that decides whether module names are prepended to all
   :term:`object` names (for object types where a "module" of some kind is
   defined), e.g. for :rst:dir:`function` directives. Default is ``True``.
   defined), e.g. for :rst:dir:`py:function` directives. Default is ``True``.

.. confval:: show_authors

   A boolean that decides whether :rst:dir:`moduleauthor` and :rst:dir:`sectionauthor`
   directives produce any output in the built files.
   A boolean that decides whether :rst:dir:`codeauthor` and
   :rst:dir:`sectionauthor` directives produce any output in the built files.

.. confval:: modindex_common_prefix

@@ -387,6 +388,8 @@ Options for HTML output
These options influence HTML as well as HTML Help output, and other builders
that use Sphinx' HTMLWriter class.

.. XXX document html_context

.. confval:: html_theme

   The "theme" that the HTML output should use. See the :doc:`section about
@@ -552,19 +555,6 @@ that use Sphinx' HTMLWriter class.
   This will render the template ``customdownload.html`` as the page
   ``download.html``.

   .. note::

      Earlier versions of Sphinx had a value called :confval:`html_index` which
      was a clumsy way of controlling the content of the "index" document. If
      you used this feature, migrate it by adding an ``'index'`` key to this
      setting, with your custom template as the value, and in your custom
      template, use ::

         {% extend "defindex.html" %}
         {% block tables %}
         ... old template content ...
         {% endblock %}

.. confval:: html_domain_indices

   If true, generate domain-specific indices in addition to the general index.
@@ -626,8 +616,8 @@ that use Sphinx' HTMLWriter class.

.. confval:: html_file_suffix

   If nonempty, this is the file name suffix for generated HTML files. The
   default is ``".html"``.
   This is the file name suffix for generated HTML files. The default is
   ``".html"``.

   .. versionadded:: 0.4

@@ -768,8 +758,8 @@ the `Dublin Core metadata <http://dublincore.org/>`_.
.. confval:: epub_post_files

   Additional files that should be inserted after the text generated by Sphinx.
   It is a list of tuples containing the file name and the title. The default
   value is ``[]``.
   It is a list of tuples containing the file name and the title. This option
   can be used to add an appendix. The default value is ``[]``.

.. confval:: epub_exclude_files

@@ -782,6 +772,12 @@ the `Dublin Core metadata <http://dublincore.org/>`_.
   be an integer greater than zero. The default value is 3. Note: A deeply
   nested table of contents may be difficult to navigate.

.. confval:: epub_tocdup

   This flag determines if a toc entry is inserted again at the beginning of
   its nested toc listing. This allows easier navigation to the top of
   a chapter, but can be confusing because it mixes entries of different
   depth in one list. The default value is ``True``.

.. _latex-options:

@@ -52,10 +52,19 @@ flag ``:noindex:``. An example using a Python domain directive::

   .. py:function:: spam(eggs)
                    ham(eggs)
      :noindex:

      Spam or ham the foo.

This describes the two Python functions ``spam`` and ``ham``. (Note that when
signatures become too long, you can break them if you add a backslash to lines
that are continued in the next line. Example::

   .. py:function:: filterwarnings(action, message='', category=Warning, \
                    module='', lineno=0, append=False)
      :noindex:

(This example also shows how to use the ``:noindex:`` flag.)

The domains also provide roles that link back to these object descriptions. For
example, to link to one of the functions described in the example above, you
could say ::
@@ -138,11 +147,12 @@ declarations:

.. rst:directive:: .. py:currentmodule:: name

   This directive tells Sphinx that the classes, functions etc. documented from
   here are in the given module (like :rst:dir:`py:module`), but it will not create
   index entries, an entry in the Global Module Index, or a link target for
   :rst:role:`mod`. This is helpful in situations where documentation for things in
   a module is spread over multiple files or sections -- one location has the
   :rst:dir:`py:module` directive, the others only :rst:dir:`py:currentmodule`.
   here are in the given module (like :rst:dir:`py:module`), but it will not
   create index entries, an entry in the Global Module Index, or a link target
   for :rst:role:`py:mod`. This is helpful in situations where documentation
   for things in a module is spread over multiple files or sections -- one
   location has the :rst:dir:`py:module` directive, the others only
   :rst:dir:`py:currentmodule`.


The following directives are provided for module and class contents:
@@ -363,6 +373,9 @@ dot, this order is reversed. For example, in the documentation of Python's
:mod:`codecs` module, ``:py:func:`open``` always refers to the built-in
function, while ``:py:func:`.open``` refers to :func:`codecs.open`.

A similar heuristic is used to determine whether the name is an attribute of the
currently documented class.

Also, if the name is prefixed with a dot, and no exact match is found, the
target is taken as a suffix and all object names with that suffix are
searched. For example, ``:py:meth:`.TarFile.close``` references the
@@ -370,8 +383,9 @@ searched. For example, ``:py:meth:`.TarFile.close``` references the
``tarfile``. Since this can get ambiguous, if there is more than one possible
match, you will get a warning from Sphinx.

A similar heuristic is used to determine whether the name is an attribute of the
currently documented class.
Note that you can combine the ``~`` and ``.`` prefixes:
``:py:meth:`~.TarFile.close``` will reference the ``tarfile.TarFile.close()``
method, but the visible link caption will only be ``close()``.


.. _c-domain:

@@ -210,7 +210,7 @@ the following public API:
   standard Sphinx roles (see :ref:`xref-syntax`).

   This method is also available under the deprecated alias
   :meth:`add_description_unit`.
   ``add_description_unit``.

.. method:: Sphinx.add_crossref_type(directivename, rolename, indextemplate='', ref_nodeclass=None, objname='')

@@ -272,6 +272,8 @@ the following public API:
   This allows to auto-document new types of objects. See the source of the
   autodoc module for examples on how to subclass :class:`Documenter`.

   .. XXX add real docs for Documenter and subclassing

   .. versionadded:: 0.6

.. method:: Sphinx.add_autodoc_attrgetter(type, getter)

@@ -27,15 +27,16 @@ two locations for documentation, while at the same time avoiding
auto-generated-looking pure API documentation.

:mod:`autodoc` provides several directives that are versions of the usual
:rst:dir:`module`, :rst:dir:`class` and so forth. On parsing time, they import the
corresponding module and extract the docstring of the given objects, inserting
them into the page source under a suitable :rst:dir:`module`, :rst:dir:`class` etc.
directive.
:rst:dir:`py:module`, :rst:dir:`py:class` and so forth. On parsing time, they
import the corresponding module and extract the docstring of the given objects,
inserting them into the page source under a suitable :rst:dir:`py:module`,
:rst:dir:`py:class` etc. directive.

.. note::

   Just as :rst:dir:`class` respects the current :rst:dir:`module`, :rst:dir:`autoclass`
   will also do so, and likewise with :rst:dir:`method` and :rst:dir:`class`.
   Just as :rst:dir:`py:class` respects the current :rst:dir:`py:module`,
   :rst:dir:`autoclass` will also do so. Likewise, :rst:dir:`automethod` will
   respect the current :rst:dir:`py:class`.


.. rst:directive:: automodule
@@ -83,6 +84,9 @@ directive.
   will document all non-private member functions and properties (that is,
   those whose name doesn't start with ``_``).

   For modules, ``__all__`` will be respected when looking for members; the
   order of the members will also be the order in ``__all__``.

   You can also give an explicit list of members; only these will then be
   documented::

@@ -127,23 +131,24 @@ directive.

  .. versionadded:: 0.4

* The :rst:dir:`automodule`, :rst:dir:`autoclass` and :rst:dir:`autoexception` directives
  also support a flag option called ``show-inheritance``. When given, a list
  of base classes will be inserted just below the class signature (when used
  with :rst:dir:`automodule`, this will be inserted for every class that is
  documented in the module).
* The :rst:dir:`automodule`, :rst:dir:`autoclass` and
  :rst:dir:`autoexception` directives also support a flag option called
  ``show-inheritance``. When given, a list of base classes will be inserted
  just below the class signature (when used with :rst:dir:`automodule`, this
  will be inserted for every class that is documented in the module).

  .. versionadded:: 0.4

* All autodoc directives support the ``noindex`` flag option that has the
  same effect as for standard :rst:dir:`function` etc. directives: no index
  entries are generated for the documented object (and all autodocumented
  members).
  same effect as for standard :rst:dir:`py:function` etc. directives: no
  index entries are generated for the documented object (and all
  autodocumented members).

  .. versionadded:: 0.4

* :rst:dir:`automodule` also recognizes the ``synopsis``, ``platform`` and
  ``deprecated`` options that the standard :rst:dir:`module` directive supports.
  ``deprecated`` options that the standard :rst:dir:`py:module` directive
  supports.

  .. versionadded:: 0.5

@@ -213,8 +218,8 @@ There are also new config values that you can set:

   ``"class"``
      Only the class' docstring is inserted. This is the default. You can
      still document ``__init__`` as a separate method using :rst:dir:`automethod`
      or the ``members`` option to :rst:dir:`autoclass`.
      still document ``__init__`` as a separate method using
      :rst:dir:`automethod` or the ``members`` option to :rst:dir:`autoclass`.
   ``"both"``
      Both the class' and the ``__init__`` method's docstring are concatenated
      and inserted.

@@ -17,7 +17,7 @@ It adds this directive:

   This directive has one or more arguments, each giving a module or class
   name. Class names can be unqualified; in that case they are taken to exist
   in the currently described module (see :rst:dir:`module`).
   in the currently described module (see :rst:dir:`py:module`).

   For each given class, and each class in each given module, the base classes
   are determined. Then, from all classes and their base classes, a graph is

@@ -9,7 +9,21 @@
.. versionadded:: 0.5

This extension can generate automatic links to the documentation of objects in
other projects. This works as follows:
other projects.

Usage is simple: whenever Sphinx encounters a cross-reference that has no
matching target in the current documentation set, it looks for targets in the
documentation sets configured in :confval:`intersphinx_mapping`. A reference
like ``:py:class:`zipfile.ZipFile``` can then link to the Python documentation
for the ZipFile class, without you having to specify where it is located
exactly.

When using the "new" format (see below), you can even force lookup in a foreign
set by prefixing the link target appropriately. A link like ``:ref:`comparison
manual <python:comparisons>``` will then link to the label "comparisons" in the
doc set "python", if it exists.

Behind the scenes, this works as follows:

* Each Sphinx HTML build creates a file named :file:`objects.inv` that contains
  a mapping from object names to URIs relative to the HTML set's root.
@@ -70,7 +84,7 @@ linking:
  To add links to modules and objects in the Python standard library
  documentation, use::

     intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
     intersphinx_mapping = {'python': ('http://docs.python.org/3.2', None)}

  This will download the corresponding :file:`objects.inv` file from the
  Internet and generate links to the pages under the given URI. The downloaded
@@ -80,12 +94,12 @@ linking:
  A second example, showing the meaning of a non-``None`` value of the second
  tuple item::

     intersphinx_mapping = {'python': ('http://docs.python.org/',
     intersphinx_mapping = {'python': ('http://docs.python.org/3.2',
                            'python-inv.txt')}

  This will read the inventory from :file:`python-inv.txt` in the source
  directory, but still generate links to the pages under
  ``http://docs.python.org/``. It is up to you to update the inventory file as
  ``http://docs.python.org/3.2``. It is up to you to update the inventory file as
  new objects are added to the Python documentation.

.. confval:: intersphinx_cache_limit

@@ -17,15 +17,15 @@ if possible, reuse that support too.

.. note::

   :mod:`sphinx.ext.mathbase` is not meant to be added to the
   :confval:`extensions` config value, instead, use either
   :mod:`sphinx.ext.pngmath` or :mod:`sphinx.ext.jsmath` as described below.
   :mod:`.mathbase` is not meant to be added to the :confval:`extensions` config
   value, instead, use either :mod:`sphinx.ext.pngmath` or
   :mod:`sphinx.ext.jsmath` as described below.

The input language for mathematics is LaTeX markup. This is the de-facto
standard for plain-text math notation and has the added advantage that no
further translation is necessary when building LaTeX output.

:mod:`mathbase` defines these new markup elements:
:mod:`.mathbase` defines these new markup elements:

.. rst:role:: math

doc/faq.rst | 10

@@ -134,6 +134,16 @@ some notes:
  and Bookworm_. For bookworm you can download the source from
  http://code.google.com/p/threepress/ and run your own local server.

* Large floating divs are not displayed properly.
  If they cover more than one page, the div is only shown on the first page.
  In that case you can copy the :file:`epub.css` from the
  ``sphinx/themes/epub/static/`` directory to your local ``_static/``
  directory and remove the float settings.

* Files that are inserted outside of the ``toctree`` directive must be manually
  included. This sometimes applies to appendixes, e.g. the glossary or
  the indices. You can add them with the :confval:`epub_post_files` option.

.. _Epubcheck: http://code.google.com/p/epubcheck/
.. _Calibre: http://calibre-ebook.com/
.. _FBreader: http://www.fbreader.org/

@@ -45,13 +45,15 @@ See the :ref:`pertinent section in the FAQ list <usingwith>`.
Prerequisites
-------------

Sphinx needs at least **Python 2.4** to run. If you like to have source code
highlighting support, you must also install the Pygments_ library, which you can
do via setuptools' easy_install. Sphinx should work with docutils version 0.4
or some (not broken) SVN trunk snapshot.
Sphinx needs at least **Python 2.4** or **Python 3.1** to run, as well as the
docutils_ and Jinja2_ libraries. Sphinx should work with docutils version 0.5
or some (not broken) SVN trunk snapshot. If you like to have source code
highlighting support, you must also install the Pygments_ library.

.. _reStructuredText: http://docutils.sf.net/rst.html
.. _Pygments: http://pygments.org
.. _docutils: http://docutils.sf.net/
.. _Jinja2: http://jinja.pocoo.org/2/
.. _Pygments: http://pygments.org/


Usage

@@ -260,7 +260,7 @@ in a different style:
.. rst:role:: samp

   A piece of literal text, such as code. Within the contents, you can use
   curly braces to indicate a "variable" part, as in :rst:dir:`file`. For
   curly braces to indicate a "variable" part, as in :rst:role:`file`. For
   example, in ``:samp:`print 1+{variable}```, the part ``variable`` would be
   emphasized.

@@ -274,13 +274,15 @@ The following roles generate external links:

   A reference to a Python Enhancement Proposal. This generates appropriate
   index entries. The text "PEP *number*\ " is generated; in the HTML output,
   this text is a hyperlink to an online copy of the specified PEP.
   this text is a hyperlink to an online copy of the specified PEP. You can
   link to a specific section by saying ``:pep:`number#anchor```.

.. rst:role:: rfc

   A reference to an Internet Request for Comments. This generates appropriate
   index entries. The text "RFC *number*\ " is generated; in the HTML output,
   this text is a hyperlink to an online copy of the specified RFC.
   this text is a hyperlink to an online copy of the specified RFC. You can
   link to a specific section by saying ``:rfc:`number#anchor```.


Note that there are no special roles for including hyperlinks as you can use

@@ -42,15 +42,25 @@ units as well as normal text:
   Example::

      .. versionadded:: 2.5
         The `spam` parameter.
         The *spam* parameter.

   Note that there must be no blank line between the directive head and the
   explanation; this is to make these blocks visually continuous in the markup.

.. rst:directive:: .. versionchanged:: version

   Similar to :rst:dir:`versionadded`, but describes when and what changed in the named
   feature in some way (new parameters, changed side effects, etc.).
   Similar to :rst:dir:`versionadded`, but describes when and what changed in
   the named feature in some way (new parameters, changed side effects, etc.).

.. rst:directive:: .. deprecated:: version

   Similar to :rst:dir:`versionchanged`, but describes when the feature was
   deprecated. An explanation can also be given, for example to inform the
   reader what should be used instead. Example::

      .. deprecated:: 3.1
         Use :func:`spam` instead.


--------------

@@ -151,7 +151,7 @@ The special document names (and pages generated for them) are:
  :ref:`object descriptions <basic-domain-markup>`, and from :rst:dir:`index`
  directives.

  The module index contains one entry per :rst:dir:`module` directive.
  The Python module index contains one entry per :rst:dir:`py:module` directive.

The search page contains a form that uses the generated JSON search index and
JavaScript to full-text search the generated documents for search words; it

@@ -21,10 +21,10 @@ No. You have several other options:
  configuration value accordingly.

* You can :ref:`write a custom builder <writing-builders>` that derives from
  :class:`~sphinx.builders.StandaloneHTMLBuilder` and calls your template engine
  of choice.
  :class:`~sphinx.builders.html.StandaloneHTMLBuilder` and calls your template
  engine of choice.

* You can use the :class:`~sphinx.builders.PickleHTMLBuilder` that produces
* You can use the :class:`~sphinx.builders.html.PickleHTMLBuilder` that produces
  pickle files with the page contents, and postprocess them using a custom tool,
  or use them in your Web application.

@@ -261,9 +261,9 @@ in the future.

.. data:: file_suffix

   The value of the builder's :attr:`out_suffix` attribute, i.e. the file name
   extension that the output files will get. For a standard HTML builder, this
   is usually ``.html``.
   The value of the builder's :attr:`~.SerializingHTMLBuilder.out_suffix`
   attribute, i.e. the file name extension that the output files will get. For
   a standard HTML builder, this is usually ``.html``.

.. data:: has_source
276
ez_setup.py
276
ez_setup.py
@ -1,276 +0,0 @@
|
||||
#!python
|
||||
"""Bootstrap setuptools installation
|
||||
|
||||
If you want to use setuptools in your package's setup.py, just include this
|
||||
file in the same directory with it, and add this to the top of your setup.py::
|
||||
|
||||
from ez_setup import use_setuptools
|
||||
use_setuptools()
|
||||
|
||||
If you want to require a specific version of setuptools, set a download
|
||||
mirror, or use an alternate download directory, you can do so by supplying
|
||||
the appropriate options to ``use_setuptools()``.
|
||||
|
||||
This file can also be run as a script to install or upgrade setuptools.
|
||||
"""
|
||||
import sys
|
||||
DEFAULT_VERSION = "0.6c9"
|
||||
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
|
||||
|
||||
md5_data = {
|
||||
'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
|
||||
'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
|
||||
'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
|
||||
'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
|
||||
'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
|
||||
'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
|
||||
'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
|
||||
'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
|
||||
'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
|
||||
'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
|
||||
'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
|
||||
'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
|
||||
'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
|
||||
'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
|
||||
'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
|
||||
'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
|
||||
'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
|
||||
'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
|
||||
'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
|
||||
'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
|
||||
'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
|
||||
'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20',
|
||||
'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab',
|
||||
'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53',
|
||||
'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2',
|
||||
'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e',
|
||||
'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
|
||||
'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
|
||||
'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
|
||||
'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
|
||||
'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
|
||||
'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
|
||||
'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
|
||||
'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
|
||||
}

import sys, os
try: from hashlib import md5
except ImportError: from md5 import md5

def _validate_md5(egg_name, data):
    if egg_name in md5_data:
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >>sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data

def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end with
    a '/'). `to_dir` is the directory where setuptools will be downloaded, if
    it is not already available. If `download_delay` is specified, it should
    be the number of seconds that will be paused before initiating a download,
    should one be required. If an older version of setuptools is installed,
    this routine will print a message to ``sys.stderr`` and raise SystemExit in
    an attempt to abort the calling script.
    """
    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
    def do_download():
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    try:
        import pkg_resources
    except ImportError:
        return do_download()
    try:
        pkg_resources.require("setuptools>="+version); return
    except pkg_resources.VersionConflict, e:
        if was_imported:
            print >>sys.stderr, (
            "The required version of setuptools (>=%s) is not available, and\n"
            "can't be installed while this script is running. Please install\n"
            " a more recent version first, using 'easy_install -U setuptools'."
            "\n\n(Currently using %r)"
            ) % (version, e.args[0])
            sys.exit(2)
        else:
            del pkg_resources, sys.modules['pkg_resources'] # reload ok
            return do_download()
    except pkg_resources.DistributionNotFound:
        return do_download()

def download_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    delay = 15
):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download attempt.
    """
    import urllib2, shutil
    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
    url = download_base + egg_name
    saveto = os.path.join(to_dir, egg_name)
    src = dst = None
    if not os.path.exists(saveto): # Avoid repeated downloads
        try:
            from distutils import log
            if delay:
                log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.

(Note: if this machine does not have network access, please obtain the file

   %s

and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
                    version, download_base, delay, url
                ); from time import sleep; sleep(delay)
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = _validate_md5(egg_name, src.read())
            dst = open(saveto,"wb"); dst.write(data)
        finally:
            if src: src.close()
            if dst: dst.close()
    return os.path.realpath(saveto)
def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    try:
        import setuptools
    except ImportError:
        egg = None
        try:
            egg = download_setuptools(version, delay=0)
            sys.path.insert(0,egg)
            from setuptools.command.easy_install import main
            return main(list(argv)+[egg]) # we're done here
        finally:
            if egg and os.path.exists(egg):
                os.unlink(egg)
    else:
        if setuptools.__version__ == '0.0.1':
            print >>sys.stderr, (
            "You have an obsolete version of setuptools installed. Please\n"
            "remove it from your system entirely before rerunning this script."
            )
            sys.exit(2)

    req = "setuptools>="+version
    import pkg_resources
    try:
        pkg_resources.require(req)
    except pkg_resources.VersionConflict:
        try:
            from setuptools.command.easy_install import main
        except ImportError:
            from easy_install import main
        main(list(argv)+[download_setuptools(delay=0)])
        sys.exit(0) # try to force an exit
    else:
        if argv:
            from setuptools.command.easy_install import main
            main(argv)
        else:
            print "Setuptools version",version,"or greater has been installed."
            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'

def update_md5(filenames):
    """Update our built-in md5 registry"""

    import re

    for name in filenames:
        base = os.path.basename(name)
        f = open(name,'rb')
        md5_data[base] = md5(f.read()).hexdigest()
        f.close()

    data = ["    %r: %r,\n" % it for it in md5_data.items()]
    data.sort()
    repl = "".join(data)

    import inspect
    srcfile = inspect.getsourcefile(sys.modules[__name__])
    f = open(srcfile, 'rb'); src = f.read(); f.close()

    match = re.search("\nmd5_data = {\n([^}]+)}", src)
    if not match:
        print >>sys.stderr, "Internal error!"
        sys.exit(2)

    src = src[:match.start(1)] + repl + src[match.end(1):]
    f = open(srcfile,'w')
    f.write(src)
    f.close()


if __name__=='__main__':
    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
        update_md5(sys.argv[2:])
    else:
        main(sys.argv[1:])

setup.py
@ -2,8 +2,8 @@
try:
    from setuptools import setup, find_packages
except ImportError:
    import ez_setup
    ez_setup.use_setuptools()
    import distribute_setup
    distribute_setup.use_setuptools()
    from setuptools import setup, find_packages

import os
@ -47,7 +47,7 @@ A development egg can be found `here
requires = ['Pygments>=0.8', 'Jinja2>=2.2', 'docutils>=0.5']

if sys.version_info < (2, 4):
    print 'ERROR: Sphinx requires at least Python 2.4 to run.'
    print('ERROR: Sphinx requires at least Python 2.4 to run.')
    sys.exit(1)

if sys.version_info < (2, 5):
@ -178,6 +178,7 @@ setup(
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Topic :: Documentation',
        'Topic :: Text Processing',
        'Topic :: Utilities',
@ -197,4 +198,6 @@ setup(
    },
    install_requires=requires,
    cmdclass=cmdclass,
    use_2to3=True,
    use_2to3_fixers=['custom_fixers'],
)

@ -9,11 +9,14 @@
    :license: BSD, see LICENSE for details.
"""

# Keep this file executable as-is in Python 3!
# (Otherwise getting the version out of it from setup.py is impossible.)

import sys
from os import path

__version__ = '1.0b2+'
__released__ = '1.0b2' # used when Sphinx builds its own docs
__version__ = '1.1pre'
__released__ = '1.1 (hg)' # used when Sphinx builds its own docs

package_dir = path.abspath(path.dirname(__file__))

@ -35,13 +38,14 @@ if '+' in __version__ or 'pre' in __version__:

def main(argv=sys.argv):
    if sys.version_info[:3] < (2, 4, 0):
        print >>sys.stderr, \
            'Error: Sphinx requires at least Python 2.4 to run.'
        sys.stderr.write('Error: Sphinx requires at least '
                         'Python 2.4 to run.\n')
        return 1

    try:
        from sphinx import cmdline
    except ImportError, err:
    except ImportError:
        err = sys.exc_info()[1]
        errstr = str(err)
        if errstr.lower().startswith('no module named'):
            whichmod = errstr[16:]
@ -54,14 +58,14 @@ def main(argv=sys.argv):
                whichmod = 'roman module (which is distributed with Docutils)'
                hint = ('This can happen if you upgraded docutils using\n'
                        'easy_install without uninstalling the old version'
                        'first.')
                        'first.\n')
            else:
                whichmod += ' module'
            print >>sys.stderr, ('Error: The %s cannot be found. '
            sys.stderr.write('Error: The %s cannot be found. '
                             'Did you install Sphinx and its dependencies '
                             'correctly?' % whichmod)
                             'correctly?\n' % whichmod)
            if hint:
                print >> sys.stderr, hint
                sys.stderr.write(hint)
            return 1
        raise
    return cmdline.main(argv)

@ -37,9 +37,6 @@ from sphinx.util.osutil import ENOENT
from sphinx.util.console import bold


# Directive is either new-style or old-style
clstypes = (type, types.ClassType)

# List of all known core events. Maps name to arguments description.
events = {
    'builder-inited': '',
@ -109,7 +106,9 @@ class Sphinx(object):
        if self.confdir is None:
            self.confdir = self.srcdir

        # load all extension modules
        # backwards compatibility: activate old C markup
        self.setup_extension('sphinx.ext.oldcmarkup')
        # load all user-given extension modules
        for extension in self.config.extensions:
            self.setup_extension(extension)
        # the config file itself can be an extension

@ -11,15 +11,17 @@
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import codecs
|
||||
from os import path
|
||||
import zipfile
|
||||
from os import path
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.transforms import Transform
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.builders.html import StandaloneHTMLBuilder
|
||||
from sphinx.util.osutil import EEXIST
|
||||
from sphinx.util.smartypants import sphinx_smarty_pants as ssp
|
||||
|
||||
|
||||
# (Fragment) templates from which the metainfo files content.opf, toc.ncx,
|
||||
@ -119,29 +121,10 @@ _media_types = {
|
||||
'.ttf': 'application/x-font-ttf',
|
||||
}
|
||||
|
||||
|
||||
# The transform to show link targets
|
||||
|
||||
class VisibleLinksTransform(Transform):
|
||||
"""
|
||||
Add the link target of referances to the text, unless it is already
|
||||
present in the description.
|
||||
"""
|
||||
|
||||
# This transform must run after the references transforms
|
||||
default_priority = 680
|
||||
|
||||
def apply(self):
|
||||
for ref in self.document.traverse(nodes.reference):
|
||||
uri = ref.get('refuri', '')
|
||||
if ( uri.startswith('http:') or uri.startswith('https:') or \
|
||||
uri.startswith('ftp:') ) and uri not in ref.astext():
|
||||
uri = _link_target_template % {'uri': uri}
|
||||
if uri:
|
||||
idx = ref.parent.index(ref) + 1
|
||||
link = nodes.inline(uri, uri)
|
||||
link['classes'].append(_css_link_target_class)
|
||||
ref.parent.insert(idx, link)
|
||||
# Regular expression to match colons only in local fragment identifiers.
|
||||
# If the URI contains a colon before the #,
|
||||
# it is an external link that should not change.
|
||||
_refuri_re = re.compile("([^#:]*#)(.*)")
|
||||
|
||||
|
||||
# The epub publisher
|
||||
@ -170,7 +153,6 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
# the output files for epub must be .html only
|
||||
self.out_suffix = '.html'
|
||||
self.playorder = 0
|
||||
self.app.add_transform(VisibleLinksTransform)
|
||||
|
||||
def get_theme_config(self):
|
||||
return self.config.epub_theme, {}
|
||||
@ -194,17 +176,20 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
"""Collect section titles, their depth in the toc and the refuri."""
|
||||
# XXX: is there a better way than checking the attribute
|
||||
# toctree-l[1-8] on the parent node?
|
||||
if isinstance(doctree, nodes.reference):
|
||||
if isinstance(doctree, nodes.reference) and doctree.has_key('refuri'):
|
||||
refuri = doctree['refuri']
|
||||
if refuri.startswith('http://') or refuri.startswith('https://') \
|
||||
or refuri.startswith('irc:') or refuri.startswith('mailto:'):
|
||||
return result
|
||||
classes = doctree.parent.attributes['classes']
|
||||
level = 1
|
||||
for l in range(8, 0, -1): # or range(1, 8)?
|
||||
if (_toctree_template % l) in classes:
|
||||
level = l
|
||||
for level in range(8, 0, -1): # or range(1, 8)?
|
||||
if (_toctree_template % level) in classes:
|
||||
result.append({
|
||||
'level': level,
|
||||
'refuri': self.esc(doctree['refuri']),
|
||||
'text': self.esc(doctree.astext())
|
||||
'refuri': self.esc(refuri),
|
||||
'text': ssp(self.esc(doctree.astext()))
|
||||
})
|
||||
break
|
||||
else:
|
||||
for elem in doctree.children:
|
||||
result = self.get_refnodes(elem, result)
|
||||
@ -220,21 +205,97 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
self.refnodes.insert(0, {
|
||||
'level': 1,
|
||||
'refuri': self.esc(self.config.master_doc + '.html'),
|
||||
'text': self.esc(self.env.titles[self.config.master_doc].astext())
|
||||
'text': ssp(self.esc(
|
||||
self.env.titles[self.config.master_doc].astext()))
|
||||
})
|
||||
for file, text in reversed(self.config.epub_pre_files):
|
||||
self.refnodes.insert(0, {
|
||||
'level': 1,
|
||||
'refuri': self.esc(file + '.html'),
|
||||
'text': self.esc(text)
|
||||
'refuri': self.esc(file),
|
||||
'text': ssp(self.esc(text))
|
||||
})
|
||||
for file, text in self.config.epub_post_files:
|
||||
self.refnodes.append({
|
||||
'level': 1,
|
||||
'refuri': self.esc(file + '.html'),
|
||||
'text': self.esc(text)
|
||||
'refuri': self.esc(file),
|
||||
'text': ssp(self.esc(text))
|
||||
})
|
||||
|
||||
def fix_fragment(self, match):
|
||||
"""Return a href attribute with colons replaced by hyphens.
|
||||
"""
|
||||
return match.group(1) + match.group(2).replace(':', '-')
|
||||
|
||||
def fix_ids(self, tree):
|
||||
"""Replace colons with hyphens in href and id attributes.
|
||||
Some readers crash because they interpret the part as a
|
||||
transport protocol specification.
|
||||
"""
|
||||
for node in tree.traverse(nodes.reference):
|
||||
if 'refuri' in node:
|
||||
m = _refuri_re.match(node['refuri'])
|
||||
if m:
|
||||
node['refuri'] = self.fix_fragment(m)
|
||||
if 'refid' in node:
|
||||
node['refid'] = node['refid'].replace(':', '-')
|
||||
for node in tree.traverse(addnodes.desc_signature):
|
||||
ids = node.attributes['ids']
|
||||
newids = []
|
||||
for id in ids:
|
||||
newids.append(id.replace(':', '-'))
|
||||
node.attributes['ids'] = newids
|
||||
|
||||
def add_visible_links(self, tree):
|
||||
"""Append visible link targets after external links.
|
||||
"""
|
||||
for node in tree.traverse(nodes.reference):
|
||||
uri = node.get('refuri', '')
|
||||
if (uri.startswith('http:') or uri.startswith('https:') or
|
||||
uri.startswith('ftp:')) and uri not in node.astext():
|
||||
uri = _link_target_template % {'uri': uri}
|
||||
if uri:
|
||||
idx = node.parent.index(node) + 1
|
||||
link = nodes.inline(uri, uri)
|
||||
link['classes'].append(_css_link_target_class)
|
||||
node.parent.insert(idx, link)
|
||||
|
||||
def write_doc(self, docname, doctree):
|
||||
"""Write one document file.
|
||||
This method is overwritten in order to fix fragment identifiers
|
||||
and to add visible external links.
|
||||
"""
|
||||
self.fix_ids(doctree)
|
||||
self.add_visible_links(doctree)
|
||||
return StandaloneHTMLBuilder.write_doc(self, docname, doctree)
|
||||
|
||||
def fix_genindex(self, tree):
|
||||
"""Fix href attributes for genindex pages.
|
||||
"""
|
||||
# XXX: modifies tree inline
|
||||
# Logic modeled from themes/basic/genindex.html
|
||||
for key, columns in tree:
|
||||
for entryname, (links, subitems) in columns:
|
||||
for (i, link) in enumerate(links):
|
||||
m = _refuri_re.match(link)
|
||||
if m:
|
||||
links[i] = self.fix_fragment(m)
|
||||
for subentryname, subentrylinks in subitems:
|
||||
for (i, link) in enumerate(subentrylinks):
|
||||
m = _refuri_re.match(link)
|
||||
if m:
|
||||
subentrylinks[i] = self.fix_fragment(m)
|
||||
|
||||
def handle_page(self, pagename, addctx, templatename='page.html',
|
||||
outfilename=None, event_arg=None):
|
||||
"""Create a rendered page.
|
||||
This method is overwritten for genindex pages in order to fix
|
||||
href link attributes.
|
||||
"""
|
||||
if pagename.startswith('genindex'):
|
||||
self.fix_genindex(addctx['genindexentries'])
|
||||
StandaloneHTMLBuilder.handle_page(self, pagename, addctx, templatename,
|
||||
outfilename, event_arg)
|
||||
|
||||
|
||||
# Finish by building the epub file
|
||||
def handle_finish(self):
|
||||
@ -380,7 +441,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
|
||||
navstack.append(navlist)
|
||||
navlist = []
|
||||
level += 1
|
||||
if lastnode:
|
||||
if lastnode and self.config.epub_tocdup:
|
||||
# Insert starting point in subtoc with same playOrder
|
||||
navlist.append(self.new_navpoint(lastnode, level, False))
|
||||
navlist.append(self.new_navpoint(node, level))
|
||||
|
@ -30,12 +30,12 @@ from docutils.frontend import OptionParser
|
||||
from docutils.readers.doctree import Reader as DoctreeReader
|
||||
|
||||
from sphinx import package_dir, __version__
|
||||
from sphinx.util import copy_static_entry
|
||||
from sphinx.util import jsonimpl, copy_static_entry
|
||||
from sphinx.util.osutil import SEP, os_path, relative_uri, ensuredir, \
|
||||
movefile, ustrftime, copyfile
|
||||
from sphinx.util.nodes import inline_all_toctrees
|
||||
from sphinx.util.matching import patmatch, compile_matchers
|
||||
from sphinx.util.pycompat import any
|
||||
from sphinx.util.pycompat import any, b
|
||||
from sphinx.errors import SphinxError
|
||||
from sphinx.locale import _
|
||||
from sphinx.search import js_index
|
||||
@ -47,14 +47,6 @@ from sphinx.util.console import bold, darkgreen, brown
|
||||
from sphinx.writers.html import HTMLWriter, HTMLTranslator, \
|
||||
SmartyPantsHTMLTranslator
|
||||
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
json = None
|
||||
|
||||
#: the filename for the inventory of objects
|
||||
INVENTORY_FILENAME = 'objects.inv'
|
||||
#: the filename for the "last build" file (for serializing builders)
|
||||
@ -71,6 +63,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
out_suffix = '.html'
|
||||
link_suffix = '.html' # defaults to matching out_suffix
|
||||
indexer_format = js_index
|
||||
indexer_dumps_unicode = True
|
||||
supported_image_types = ['image/svg+xml', 'image/png',
|
||||
'image/gif', 'image/jpeg']
|
||||
searchindex_filename = 'searchindex.js'
|
||||
@ -99,7 +92,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
self.init_templates()
|
||||
self.init_highlighter()
|
||||
self.init_translator_class()
|
||||
if self.config.html_file_suffix:
|
||||
if self.config.html_file_suffix is not None:
|
||||
self.out_suffix = self.config.html_file_suffix
|
||||
|
||||
if self.config.html_link_suffix is not None:
|
||||
@ -154,8 +147,9 @@ class StandaloneHTMLBuilder(Builder):
|
||||
cfgdict = dict((name, self.config[name])
|
||||
for (name, desc) in self.config.values.iteritems()
|
||||
if desc[1] == 'html')
|
||||
self.config_hash = md5(str(cfgdict)).hexdigest()
|
||||
self.tags_hash = md5(str(sorted(self.tags))).hexdigest()
|
||||
self.config_hash = md5(unicode(cfgdict).encode('utf-8')).hexdigest()
|
||||
self.tags_hash = md5(unicode(sorted(self.tags)).encode('utf-8')) \
|
||||
.hexdigest()
|
||||
old_config_hash = old_tags_hash = ''
|
||||
try:
|
||||
fp = open(path.join(self.outdir, '.buildinfo'))
|
||||
@ -207,7 +201,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
"""Utility: Render a lone doctree node."""
|
||||
if node is None:
|
||||
return {'fragment': ''}
|
||||
doc = new_document('<partial node>')
|
||||
doc = new_document(b('<partial node>'))
|
||||
doc.append(node)
|
||||
|
||||
if self._publisher is None:
|
||||
@ -735,10 +729,12 @@ class StandaloneHTMLBuilder(Builder):
|
||||
self.info(bold('dumping object inventory... '), nonl=True)
|
||||
f = open(path.join(self.outdir, INVENTORY_FILENAME), 'wb')
|
||||
try:
|
||||
f.write('# Sphinx inventory version 2\n')
|
||||
f.write('# Project: %s\n' % self.config.project.encode('utf-8'))
|
||||
f.write('# Version: %s\n' % self.config.version.encode('utf-8'))
|
||||
f.write('# The remainder of this file is compressed using zlib.\n')
|
||||
f.write((u'# Sphinx inventory version 2\n'
|
||||
u'# Project: %s\n'
|
||||
u'# Version: %s\n'
|
||||
u'# The remainder of this file is compressed using zlib.\n'
|
||||
% (self.config.project, self.config.version)
|
||||
).encode('utf-8'))
|
||||
compressor = zlib.compressobj(9)
|
||||
for domainname, domain in self.env.domains.iteritems():
|
||||
for name, dispname, type, docname, anchor, prio in \
|
||||
@ -750,11 +746,9 @@ class StandaloneHTMLBuilder(Builder):
|
||||
if dispname == name:
|
||||
dispname = u'-'
|
||||
f.write(compressor.compress(
|
||||
'%s %s:%s %s %s %s\n' % (name.encode('utf-8'),
|
||||
domainname.encode('utf-8'),
|
||||
type.encode('utf-8'), prio,
|
||||
uri.encode('utf-8'),
|
||||
dispname.encode('utf-8'))))
|
||||
(u'%s %s:%s %s %s %s\n' % (name, domainname, type,
|
||||
prio, uri, dispname)
|
||||
).encode('utf-8')))
|
||||
f.write(compressor.flush())
|
||||
finally:
|
||||
f.close()
|
||||
@ -766,6 +760,9 @@ class StandaloneHTMLBuilder(Builder):
|
||||
searchindexfn = path.join(self.outdir, self.searchindex_filename)
|
||||
# first write to a temporary file, so that if dumping fails,
|
||||
# the existing index won't be overwritten
|
||||
if self.indexer_dumps_unicode:
|
||||
f = codecs.open(searchindexfn + '.tmp', 'w', encoding='utf-8')
|
||||
else:
|
||||
f = open(searchindexfn + '.tmp', 'wb')
|
||||
try:
|
||||
self.indexer.dump(f, self.indexer_format)
|
||||
@ -855,8 +852,14 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
|
||||
def get_doc_context(self, docname, body, metatags):
|
||||
# no relation links...
|
||||
toc = self.env.get_toctree_for(self.config.master_doc, self, False)
|
||||
# if there is no toctree, toc is None
|
||||
if toc:
|
||||
self.fix_refuris(toc)
|
||||
toc = self.render_partial(toc)['fragment']
|
||||
display_toc = True
|
||||
else:
|
||||
toc = ''
|
||||
display_toc = False
|
||||
return dict(
|
||||
parents = [],
|
||||
prev = None,
|
||||
@ -869,7 +872,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
|
||||
rellinks = [],
|
||||
sourcename = '',
|
||||
toc = toc,
|
||||
display_toc = True,
|
||||
display_toc = display_toc,
|
||||
)
|
||||
|
||||
def write(self, *ignored):
|
||||
@ -917,6 +920,7 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
|
||||
#: implements a `dump`, `load`, `dumps` and `loads` functions
|
||||
#: (pickle, simplejson etc.)
|
||||
implementation = None
|
||||
implementation_dumps_unicode = False
|
||||
|
||||
#: the filename for the global context file
|
||||
globalcontext_filename = None
|
||||
@ -939,6 +943,17 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
|
||||
return docname[:-5] # up to sep
|
||||
return docname + SEP
|
||||
|
||||
def dump_context(self, context, filename):
|
||||
if self.implementation_dumps_unicode:
|
||||
f = codecs.open(filename, 'w', encoding='utf-8')
|
||||
else:
|
||||
f = open(filename, 'wb')
|
||||
try:
|
||||
# XXX: the third argument is pickle-specific!
|
||||
self.implementation.dump(context, f, 2)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
def handle_page(self, pagename, ctx, templatename='page.html',
|
||||
outfilename=None, event_arg=None):
|
||||
ctx['current_page_name'] = pagename
|
||||
@ -952,11 +967,7 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
|
||||
ctx, event_arg)
|
||||
|
||||
ensuredir(path.dirname(outfilename))
|
||||
f = open(outfilename, 'wb')
|
||||
try:
|
||||
self.implementation.dump(ctx, f, 2)
|
||||
finally:
|
||||
f.close()
|
||||
self.dump_context(ctx, outfilename)
|
||||
|
||||
# if there is a source file, copy the source file for the
|
||||
# "show source" link
|
||||
@ -969,11 +980,7 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
|
||||
def handle_finish(self):
|
||||
# dump the global context
|
||||
outfilename = path.join(self.outdir, self.globalcontext_filename)
|
||||
f = open(outfilename, 'wb')
|
||||
try:
|
||||
self.implementation.dump(self.globalcontext, f, 2)
|
||||
finally:
|
||||
f.close()
|
||||
self.dump_context(self.globalcontext, outfilename)
|
||||
|
||||
# super here to dump the search index
|
||||
StandaloneHTMLBuilder.handle_finish(self)
|
||||
@ -993,7 +1000,9 @@ class PickleHTMLBuilder(SerializingHTMLBuilder):
|
||||
A Builder that dumps the generated HTML into pickle files.
|
||||
"""
|
||||
implementation = pickle
|
||||
implementation_dumps_unicode = False
|
||||
indexer_format = pickle
|
||||
indexer_dumps_unicode = False
|
||||
name = 'pickle'
|
||||
out_suffix = '.fpickle'
|
||||
globalcontext_filename = 'globalcontext.pickle'
|
||||
@ -1007,15 +1016,17 @@ class JSONHTMLBuilder(SerializingHTMLBuilder):
|
||||
"""
|
||||
A builder that dumps the generated HTML into JSON files.
|
||||
"""
|
||||
implementation = json
|
||||
indexer_format = json
|
||||
implementation = jsonimpl
|
||||
implementation_dumps_unicode = True
|
||||
indexer_format = jsonimpl
|
||||
indexer_dumps_unicode = True
|
||||
name = 'json'
|
||||
out_suffix = '.fjson'
|
||||
globalcontext_filename = 'globalcontext.json'
|
||||
searchindex_filename = 'searchindex.json'
|
||||
|
||||
def init(self):
|
||||
if json is None:
|
||||
if jsonimpl.json is None:
|
||||
raise SphinxError(
|
||||
'The module simplejson (or json in Python >= 2.6) '
|
||||
'is not available. The JSONHTMLBuilder builder will not work.')
|
||||
|
@ -258,7 +258,8 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
|
||||
def write_index(title, refs, subitems):
|
||||
def write_param(name, value):
|
||||
item = ' <param name="%s" value="%s">\n' % (name, value)
|
||||
f.write(item.encode('ascii', 'xmlcharrefreplace'))
|
||||
f.write(item.encode('ascii', 'xmlcharrefreplace')
|
||||
.decode('ascii'))
|
||||
title = cgi.escape(title)
|
||||
f.write('<LI> <OBJECT type="text/sitemap">\n')
|
||||
write_param('Keyword', title)
|
||||
|
@ -16,7 +16,7 @@ from urllib2 import build_opener, HTTPError
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.util.console import purple, red, darkgreen
|
||||
from sphinx.util.console import purple, red, darkgreen, darkgray
|
||||
|
||||
# create an opener that will simulate a browser user-agent
|
||||
opener = build_opener()
|
||||
@ -71,9 +71,12 @@ class CheckExternalLinksBuilder(Builder):
|
||||
break
|
||||
lineno = node.line
|
||||
|
||||
if uri[0:5] == 'http:' or uri[0:6] == 'https:':
|
||||
if len(uri) == 0 or uri[0:7] == 'mailto:' or uri[0:4] == 'ftp:':
|
||||
return
|
||||
|
||||
if lineno:
|
||||
self.info('(line %3d) ' % lineno, nonl=1)
|
||||
if uri[0:5] == 'http:' or uri[0:6] == 'https:':
|
||||
self.info(uri, nonl=1)
|
||||
|
||||
if uri in self.broken:
|
||||
@ -98,15 +101,9 @@ class CheckExternalLinksBuilder(Builder):
|
||||
self.write_entry('redirected', docname,
|
||||
lineno, uri + ' to ' + s)
|
||||
self.redirected[uri] = (r, s)
|
||||
elif len(uri) == 0 or uri[0:7] == 'mailto:' or uri[0:4] == 'ftp:':
|
||||
return
|
||||
else:
|
||||
self.warn(uri + ' - ' + red('malformed!'))
|
||||
self.write_entry('malformed', docname, lineno, uri)
|
||||
if self.app.quiet:
|
||||
self.warn('malformed link: %s' % uri,
|
||||
'%s:%s' % (self.env.doc2path(docname), lineno))
|
||||
self.app.statuscode = 1
|
||||
self.info(uri + ' - ' + darkgray('local'))
|
||||
self.write_entry('local', docname, lineno, uri)
|
||||
|
||||
if self.broken:
|
||||
self.app.statuscode = 1
|
||||
|
@ -130,8 +130,16 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
|
||||
for indexname, indexcls, content, collapse in self.domain_indices:
|
||||
item = section_template % {'title': indexcls.localname,
|
||||
'ref': '%s.html' % indexname}
|
||||
sections.append(' '*4*4 + item)
|
||||
sections = '\n'.join(sections)
|
||||
sections.append((' ' * 4 * 4 + item).encode('utf-8'))
|
||||
# sections may be unicode strings or byte strings, we have to make sure
|
||||
# they are all byte strings before joining them
|
||||
new_sections = []
|
||||
for section in sections:
|
||||
if isinstance(section, unicode):
|
||||
new_sections.append(section.encode('utf-8'))
|
||||
else:
|
||||
new_sections.append(section)
|
||||
sections = u'\n'.encode('utf-8').join(new_sections)
|
||||
|
||||
# keywords
|
||||
keywords = []
|
||||
@ -230,7 +238,7 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
|
||||
link = node['refuri']
|
||||
title = escape(node.astext()).replace('"','"')
|
||||
item = section_template % {'title': title, 'ref': link}
|
||||
item = ' '*4*indentlevel + item.encode('ascii', 'xmlcharrefreplace')
|
||||
item = u' ' * 4 * indentlevel + item
|
||||
parts.append(item.encode('ascii', 'xmlcharrefreplace'))
|
||||
elif isinstance(node, nodes.bullet_list):
|
||||
for subnode in node:
|
||||
|
@ -11,13 +11,18 @@

import os
import re
import sys
from os import path

from sphinx.errors import ConfigError
from sphinx.util.osutil import make_filename
from sphinx.util.pycompat import bytes, b, convert_with_2to3

nonascii_re = re.compile(r'[\x80-\xff]')
nonascii_re = re.compile(b(r'[\x80-\xff]'))

CONFIG_SYNTAX_ERROR = "There is a syntax error in your configuration file: %s"
if sys.version_info >= (3, 0):
    CONFIG_SYNTAX_ERROR += "\nDid you change the syntax from 2.x to 3.x?"

class Config(object):
    """Configuration file abstraction."""
@ -124,6 +129,7 @@ class Config(object):
        epub_post_files = ([], 'env'),
        epub_exclude_files = ([], 'env'),
        epub_tocdepth = (3, 'env'),
        epub_tocdup = (True, 'env'),

        # LaTeX options
        latex_documents = ([], None),
@ -162,12 +168,30 @@ class Config(object):
        config['tags'] = tags
        olddir = os.getcwd()
        try:
            try:
                # we promise to have the config dir as current dir while the
                # config file is executed
                os.chdir(dirname)
                execfile(config['__file__'], config)
            # get config source
            f = open(config_file, 'rb')
            try:
                source = f.read()
            finally:
                f.close()
            try:
                # compile to a code object, handle syntax errors
                try:
                    code = compile(source, config_file, 'exec')
                except SyntaxError:
                    if convert_with_2to3:
                        # maybe the file uses 2.x syntax; try to refactor to
                        # 3.x syntax using 2to3
                        source = convert_with_2to3(config_file)
                        code = compile(source, config_file, 'exec')
                    else:
                        raise
                exec code in config
            except SyntaxError, err:
                raise ConfigError('There is a syntax error in your '
                                  'configuration file: ' + str(err))
                raise ConfigError(CONFIG_SYNTAX_ERROR % err)
        finally:
            os.chdir(olddir)

@ -181,10 +205,11 @@ class Config(object):
        # check all string values for non-ASCII characters in bytestrings,
        # since that can result in UnicodeErrors all over the place
        for name, value in self._raw_config.iteritems():
            if isinstance(value, str) and nonascii_re.search(value):
            if isinstance(value, bytes) and nonascii_re.search(value):
                warn('the config value %r is set to a string with non-ASCII '
                     'characters; this can lead to Unicode errors occurring. '
                     'Please use Unicode strings, e.g. u"Content".' % name)
                     'Please use Unicode strings, e.g. %r.' % (name, u'Content')
                     )

    def init_values(self):
        config = self._raw_config
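
A minimal standalone sketch of the configuration-loading pattern introduced in the hunk above: compile conf.py as-is and fall back to a 2to3-style converter only when the source still uses Python 2 syntax. The names load_conf and convert_source are illustrative only (the changeset itself uses convert_with_2to3 from sphinx.util.pycompat); this is not code from the commit.

def load_conf(config_file, namespace, convert_source=None):
    # Read conf.py as raw bytes, exactly as it is stored on disk.
    f = open(config_file, 'rb')
    try:
        source = f.read()
    finally:
        f.close()
    try:
        # First try to compile the source unchanged.
        code = compile(source, config_file, 'exec')
    except SyntaxError:
        if convert_source is None:
            raise
        # Retry after running the file through a 2to3-style converter.
        code = compile(convert_source(config_file), config_file, 'exec')
    exec(code, namespace)
    return namespace
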
@ -32,6 +32,7 @@ except AttributeError:
|
||||
|
||||
|
||||
# RE to strip backslash escapes
|
||||
nl_escape_re = re.compile(r'\\\n')
|
||||
strip_backslash_re = re.compile(r'\\(?=[^\\])')
|
||||
|
||||
|
||||
@ -57,10 +58,12 @@ class ObjectDescription(Directive):
|
||||
"""
|
||||
Retrieve the signatures to document from the directive arguments. By
|
||||
default, signatures are given as arguments, one per line.
|
||||
|
||||
Backslash-escaping of newlines is supported.
|
||||
"""
|
||||
lines = nl_escape_re.sub('', self.arguments[0]).split('\n')
|
||||
# remove backslashes to support (dummy) escapes; helps Vim highlighting
|
||||
return [strip_backslash_re.sub('', sig.strip())
|
||||
for sig in self.arguments[0].split('\n')]
|
||||
return [strip_backslash_re.sub('', line.strip()) for line in lines]
|
||||
|
||||
def handle_signature(self, sig, signode):
|
||||
"""
|
||||
@ -159,7 +162,6 @@ class ObjectDescription(Directive):
|
||||
self.env.temp_data['object'] = self.names[0]
|
||||
self.before_content()
|
||||
self.state.nested_parse(self.content, self.content_offset, contentnode)
|
||||
#self.handle_doc_fields(contentnode)
|
||||
DocFieldTransformer(self).transform_all(contentnode)
|
||||
self.env.temp_data['object'] = None
|
||||
self.after_content()
|
||||
|
@ -102,7 +102,7 @@ class LiteralInclude(Directive):
|
||||
rel_fn = filename[1:]
|
||||
else:
|
||||
docdir = path.dirname(env.doc2path(env.docname, base=None))
|
||||
rel_fn = path.normpath(path.join(docdir, filename))
|
||||
rel_fn = path.join(docdir, filename)
|
||||
try:
|
||||
fn = path.join(env.srcdir, rel_fn)
|
||||
except UnicodeDecodeError:
|
||||
@ -119,7 +119,7 @@ class LiteralInclude(Directive):
|
||||
encoding = self.options.get('encoding', env.config.source_encoding)
|
||||
codec_info = codecs.lookup(encoding)
|
||||
try:
|
||||
f = codecs.StreamReaderWriter(open(fn, 'U'),
|
||||
f = codecs.StreamReaderWriter(open(fn, 'rb'),
|
||||
codec_info[2], codec_info[3], 'strict')
|
||||
lines = f.readlines()
|
||||
f.close()
|
||||
|
@ -215,12 +215,7 @@ class VersionChange(Directive):
|
||||
else:
|
||||
ret = [node]
|
||||
env = self.state.document.settings.env
|
||||
env.versionchanges.setdefault(node['version'], []).append(
|
||||
(node['type'], env.temp_data['docname'], self.lineno,
|
||||
# XXX: python domain specific
|
||||
env.temp_data.get('py:module'),
|
||||
env.temp_data.get('object'),
|
||||
node.astext()))
|
||||
env.note_versionchange(node['type'], node['version'], node, self.lineno)
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -110,7 +110,7 @@ class DefinitionError(Exception):
|
||||
return self.description
|
||||
|
||||
def __str__(self):
|
||||
return unicode(self.encode('utf-8'))
|
||||
return unicode(self).encode('utf-8')
|
||||
|
||||
|
||||
class DefExpr(object):
|
||||
@ -132,6 +132,8 @@ class DefExpr(object):
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
__hash__ = None
|
||||
|
||||
def clone(self):
|
||||
"""Close a definition expression node"""
|
||||
return deepcopy(self)
|
||||
|
@ -56,7 +56,7 @@ class JSObject(ObjectDescription):
|
||||
else:
|
||||
# just a function or constructor
|
||||
objectname = ''
|
||||
fullname = ''
|
||||
fullname = name
|
||||
|
||||
signode['object'] = objectname
|
||||
signode['fullname'] = fullname
|
||||
|
@ -356,6 +356,9 @@ class PyModule(Directive):
|
||||
env.domaindata['py']['modules'][modname] = \
|
||||
(env.docname, self.options.get('synopsis', ''),
|
||||
self.options.get('platform', ''), 'deprecated' in self.options)
|
||||
# make a duplicate entry in 'objects' to facilitate searching for the
|
||||
# module in PythonDomain.find_obj()
|
||||
env.domaindata['py']['objects'][modname] = (env.docname, 'module')
|
||||
targetnode = nodes.target('', '', ids=['module-' + modname], ismod=True)
|
||||
self.state.document.note_explicit_target(targetnode)
|
||||
ret = [targetnode]
|
||||
@ -544,7 +547,7 @@ class PythonDomain(Domain):
|
||||
if fn == docname:
|
||||
del self.data['modules'][modname]
|
||||
|
||||
def find_obj(self, env, modname, classname, name, type, searchorder=0):
|
||||
def find_obj(self, env, modname, classname, name, type, searchmode=0):
|
||||
"""
|
||||
Find a Python object for "name", perhaps using the given module and/or
|
||||
classname. Returns a list of (name, object entry) tuples.
|
||||
@ -560,22 +563,31 @@ class PythonDomain(Domain):
|
||||
matches = []
|
||||
|
||||
newname = None
|
||||
if searchorder == 1:
|
||||
if modname and classname and \
|
||||
modname + '.' + classname + '.' + name in objects:
|
||||
newname = modname + '.' + classname + '.' + name
|
||||
elif modname and modname + '.' + name in objects:
|
||||
if searchmode == 1:
|
||||
objtypes = self.objtypes_for_role(type)
|
||||
if modname and classname:
|
||||
fullname = modname + '.' + classname + '.' + name
|
||||
if fullname in objects and objects[fullname][1] in objtypes:
|
||||
newname = fullname
|
||||
if not newname:
|
||||
if modname and modname + '.' + name in objects and \
|
||||
objects[modname + '.' + name][1] in objtypes:
|
||||
newname = modname + '.' + name
|
||||
elif name in objects:
|
||||
elif name in objects and objects[name][1] in objtypes:
|
||||
newname = name
|
||||
else:
|
||||
# "fuzzy" searching mode
|
||||
searchname = '.' + name
|
||||
matches = [(name, objects[name]) for name in objects
|
||||
if name.endswith(searchname)]
|
||||
if name.endswith(searchname)
|
||||
and objects[name][1] in objtypes]
|
||||
else:
|
||||
# NOTE: searching for exact match, object type is not considered
|
||||
if name in objects:
|
||||
newname = name
|
||||
elif type == 'mod':
|
||||
# only exact matches allowed for modules
|
||||
return []
|
||||
elif classname and classname + '.' + name in objects:
|
||||
newname = classname + '.' + name
|
||||
elif modname and modname + '.' + name in objects:
|
||||
@ -597,24 +609,11 @@ class PythonDomain(Domain):
|
||||
|
||||
def resolve_xref(self, env, fromdocname, builder,
|
||||
type, target, node, contnode):
|
||||
if (type == 'mod' or
|
||||
type == 'obj' and target in self.data['modules']):
|
||||
docname, synopsis, platform, deprecated = \
|
||||
self.data['modules'].get(target, ('','','', ''))
|
||||
if not docname:
|
||||
return None
|
||||
else:
|
||||
title = '%s%s%s' % ((platform and '(%s) ' % platform),
|
||||
synopsis,
|
||||
(deprecated and ' (deprecated)' or ''))
|
||||
return make_refnode(builder, fromdocname, docname,
|
||||
'module-' + target, contnode, title)
|
||||
else:
|
||||
modname = node.get('py:module')
|
||||
clsname = node.get('py:class')
|
||||
searchorder = node.hasattr('refspecific') and 1 or 0
|
||||
searchmode = node.hasattr('refspecific') and 1 or 0
|
||||
matches = self.find_obj(env, modname, clsname, target,
|
||||
type, searchorder)
|
||||
type, searchmode)
|
||||
if not matches:
|
||||
return None
|
||||
elif len(matches) > 1:
|
||||
@ -624,6 +623,21 @@ class PythonDomain(Domain):
|
||||
', '.join(match[0] for match in matches)),
|
||||
node.line)
|
||||
name, obj = matches[0]
|
||||
|
||||
if obj[1] == 'module':
|
||||
# get additional info for modules
|
||||
docname, synopsis, platform, deprecated = self.data['modules'][name]
|
||||
assert docname == obj[0]
|
||||
title = name
|
||||
if synopsis:
|
||||
title += ': ' + synopsis
|
||||
if deprecated:
|
||||
title += _(' (deprecated)')
|
||||
if platform:
|
||||
title += ' (' + platform + ')'
|
||||
return make_refnode(builder, fromdocname, docname,
|
||||
'module-' + name, contnode, title)
|
||||
else:
|
||||
return make_refnode(builder, fromdocname, obj[0], name,
|
||||
contnode, name)
|
||||
|
||||
|
@ -28,9 +28,10 @@ class ReSTMarkup(ObjectDescription):
|
||||
"""
|
||||
|
||||
def add_target_and_index(self, name, sig, signode):
|
||||
if name not in self.state.document.ids:
|
||||
signode['names'].append(name)
|
||||
signode['ids'].append(name)
|
||||
targetname = self.objtype + '-' + name
|
||||
if targetname not in self.state.document.ids:
|
||||
signode['names'].append(targetname)
|
||||
signode['ids'].append(targetname)
|
||||
signode['first'] = (not self.names)
|
||||
self.state.document.note_explicit_target(signode)
|
||||
|
||||
@ -47,7 +48,7 @@ class ReSTMarkup(ObjectDescription):
|
||||
indextext = self.get_index_text(self.objtype, name)
|
||||
if indextext:
|
||||
self.indexnode['entries'].append(('single', indextext,
|
||||
name, name))
|
||||
targetname, targetname))
|
||||
|
||||
def get_index_text(self, objectname, name):
|
||||
if self.objtype == 'directive':
|
||||
@ -129,8 +130,9 @@ class ReSTDomain(Domain):
|
||||
if (objtype, target) in objects:
|
||||
return make_refnode(builder, fromdocname,
|
||||
objects[objtype, target],
|
||||
target, contnode, target)
|
||||
objtype + '-' + target,
|
||||
contnode, target + ' ' + objtype)
|
||||
|
||||
def get_objects(self):
|
||||
for (typ, name), docname in self.data['objects'].iteritems():
|
||||
yield name, name, typ, docname, name, 1
|
||||
yield name, name, typ, docname, typ + '-' + name, 1
|
||||
|
@ -484,7 +484,13 @@ class StandardDomain(Domain):
|
||||
return make_refnode(builder, fromdocname, docname,
|
||||
labelid, contnode)
|
||||
else:
|
||||
docname, labelid = self.data['objects'].get((typ, target), ('', ''))
|
||||
objtypes = self.objtypes_for_role(typ) or []
|
||||
for objtype in objtypes:
|
||||
if (objtype, target) in self.data['objects']:
|
||||
docname, labelid = self.data['objects'][objtype, target]
|
||||
break
|
||||
else:
|
||||
docname, labelid = '', ''
|
||||
if not docname:
|
||||
if typ == 'term':
|
||||
env.warn(node.get('refdoc', fromdocname),
|
||||
|
@ -11,6 +11,7 @@
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import types
|
||||
import codecs
|
||||
@ -40,10 +41,11 @@ from sphinx.util import url_re, get_matching_docs, docname_join, \
|
||||
from sphinx.util.nodes import clean_astext, make_refnode, extract_messages
|
||||
from sphinx.util.osutil import movefile, SEP, ustrftime
|
||||
from sphinx.util.matching import compile_matchers
|
||||
from sphinx.util.pycompat import all
|
||||
from sphinx.util.pycompat import all, class_types
|
||||
from sphinx.errors import SphinxError, ExtensionError
|
||||
from sphinx.locale import _, init as init_locale
|
||||
|
||||
fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
|
||||
|
||||
orig_role_function = roles.role
|
||||
orig_directive_function = directives.directive
|
||||
@ -64,7 +66,7 @@ default_settings = {
|
||||
|
||||
# This is increased every time an environment attribute is added
|
||||
# or changed to properly invalidate pickle files.
|
||||
ENV_VERSION = 36
|
||||
ENV_VERSION = 38
|
||||
|
||||
|
||||
default_substitutions = set([
|
||||
@ -81,7 +83,7 @@ class WarningStream(object):
|
||||
self.warnfunc = warnfunc
|
||||
def write(self, text):
|
||||
if text.strip():
|
||||
self.warnfunc(text, None, '')
|
||||
self.warnfunc(text.strip(), None, '')
|
||||
|
||||
|
||||
class NoUri(Exception):
|
||||
@ -289,7 +291,7 @@ class BuildEnvironment:
|
||||
if key.startswith('_') or \
|
||||
isinstance(val, types.ModuleType) or \
|
||||
isinstance(val, types.FunctionType) or \
|
||||
isinstance(val, (type, types.ClassType)):
|
||||
isinstance(val, class_types):
|
||||
del self.config[key]
|
||||
try:
|
||||
pickle.dump(self, picklefile, pickle.HIGHEST_PROTOCOL)
|
||||
@ -421,14 +423,14 @@ class BuildEnvironment:
|
||||
If base is a path string, return absolute path under that.
|
||||
If suffix is not None, add it instead of config.source_suffix.
|
||||
"""
|
||||
docname = docname.replace(SEP, path.sep)
|
||||
suffix = suffix or self.config.source_suffix
|
||||
if base is True:
|
||||
return path.join(self.srcdir,
|
||||
docname.replace(SEP, path.sep)) + suffix
|
||||
return path.join(self.srcdir, docname) + suffix
|
||||
elif base is None:
|
||||
return docname.replace(SEP, path.sep) + suffix
|
||||
return docname + suffix
|
||||
else:
|
||||
return path.join(base, docname.replace(SEP, path.sep)) + suffix
|
||||
return path.join(base, docname) + suffix
|
||||
|
||||
def find_files(self, config):
|
||||
"""
|
||||
@ -666,6 +668,8 @@ class BuildEnvironment:
|
||||
|
||||
class SphinxSourceClass(FileInput):
|
||||
def decode(self_, data):
|
||||
if isinstance(data, unicode):
|
||||
return data
|
||||
return data.decode(self_.encoding, 'sphinx')
|
||||
|
||||
def read(self_):
|
||||
@ -687,7 +691,7 @@ class BuildEnvironment:
|
||||
destination_class=NullOutput)
|
||||
pub.set_components(None, 'restructuredtext', None)
|
||||
pub.process_programmatic_settings(None, self.settings, None)
|
||||
pub.set_source(None, src_path)
|
||||
pub.set_source(None, src_path.encode(fs_encoding))
|
||||
pub.set_destination(None, None)
|
||||
try:
|
||||
pub.publish()
|
||||
@ -771,6 +775,12 @@ class BuildEnvironment:
|
||||
def note_dependency(self, filename):
|
||||
self.dependencies.setdefault(self.docname, set()).add(filename)
|
||||
|
||||
def note_versionchange(self, type, version, node, lineno):
|
||||
self.versionchanges.setdefault(version, []).append(
|
||||
(type, self.temp_data['docname'], lineno,
|
||||
self.temp_data.get('py:module'),
|
||||
self.temp_data.get('object'), node.astext()))
|
||||
|
||||
# post-processing of read doctrees
|
||||
|
||||
def filter_messages(self, doctree):
|
||||
@ -1521,8 +1531,9 @@ class BuildEnvironment:
|
||||
i += 1
|
||||
|
||||
# group the entries by letter
|
||||
def keyfunc2((k, v), letters=string.ascii_uppercase + '_'):
|
||||
def keyfunc2(item, letters=string.ascii_uppercase + '_'):
|
||||
# hack: mutating the subitems dicts to a list in the keyfunc
|
||||
k, v = item
|
||||
v[1] = sorted((si, se) for (si, (se, void)) in v[1].iteritems())
|
||||
# now calculate the key
|
||||
letter = k[0].upper()
|
||||
|
@ -14,7 +14,7 @@
|
||||
import re
|
||||
import sys
|
||||
import inspect
|
||||
from types import FunctionType, BuiltinFunctionType, MethodType, ClassType
|
||||
from types import FunctionType, BuiltinFunctionType, MethodType
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.utils import assemble_option_dict
|
||||
@ -27,15 +27,10 @@ from sphinx.application import ExtensionError
|
||||
from sphinx.util.nodes import nested_parse_with_titles
|
||||
from sphinx.util.compat import Directive
|
||||
from sphinx.util.inspect import isdescriptor, safe_getmembers, safe_getattr
|
||||
from sphinx.util.pycompat import base_exception, class_types
|
||||
from sphinx.util.docstrings import prepare_docstring
|
||||
|
||||
|
||||
try:
|
||||
base_exception = BaseException
|
||||
except NameError:
|
||||
base_exception = Exception
|
||||
|
||||
|
||||
#: extended signature RE: with explicit module name separated by ::
|
||||
py_ext_sig_re = re.compile(
|
||||
r'''^ ([\w.]+::)? # explicit module name
|
||||
@ -256,6 +251,9 @@ class Documenter(object):
|
||||
self.retann = None
|
||||
# the object to document (set after import_object succeeds)
|
||||
self.object = None
|
||||
self.object_name = None
|
||||
# the parent/owner of the object to document
|
||||
self.parent = None
|
||||
# the module analyzer to get at attribute docs, or None
|
||||
self.analyzer = None
|
||||
|
||||
@ -321,9 +319,13 @@ class Documenter(object):
|
||||
"""
|
||||
try:
|
||||
__import__(self.modname)
|
||||
parent = None
|
||||
obj = self.module = sys.modules[self.modname]
|
||||
for part in self.objpath:
|
||||
parent = obj
|
||||
obj = self.get_attr(obj, part)
|
||||
self.object_name = part
|
||||
self.parent = parent
|
||||
self.object = obj
|
||||
return True
|
||||
# this used to only catch SyntaxError, ImportError and AttributeError,
|
||||
@ -416,9 +418,11 @@ class Documenter(object):
|
||||
def get_doc(self, encoding=None):
|
||||
"""Decode and return lines of the docstring(s) for the object."""
|
||||
docstring = self.get_attr(self.object, '__doc__', None)
|
||||
if docstring:
|
||||
# make sure we have Unicode docstrings, then sanitize and split
|
||||
# into lines
|
||||
if isinstance(docstring, unicode):
|
||||
return [prepare_docstring(docstring)]
|
||||
elif docstring:
|
||||
return [prepare_docstring(force_decode(docstring, encoding))]
|
||||
return []
|
||||
|
||||
@ -438,8 +442,11 @@ class Documenter(object):
|
||||
# set sourcename and add content from attribute documentation
|
||||
if self.analyzer:
|
||||
# prevent encoding errors when the file name is non-ASCII
|
||||
if not isinstance(self.analyzer.srcname, unicode):
|
||||
filename = unicode(self.analyzer.srcname,
|
||||
sys.getfilesystemencoding(), 'replace')
|
||||
else:
|
||||
filename = self.analyzer.srcname
|
||||
sourcename = u'%s:docstring of %s' % (filename, self.fullname)
|
||||
|
||||
attr_docs = self.analyzer.find_attr_docs()
|
||||
@ -866,7 +873,7 @@ class ClassDocumenter(ModuleLevelDocumenter):
|
||||
|
||||
@classmethod
|
||||
def can_document_member(cls, member, membername, isattr, parent):
|
||||
return isinstance(member, (type, ClassType))
|
||||
return isinstance(member, class_types)
|
||||
|
||||
def import_object(self):
|
||||
ret = ModuleLevelDocumenter.import_object(self)
|
||||
@ -939,9 +946,12 @@ class ClassDocumenter(ModuleLevelDocumenter):
|
||||
docstrings = [initdocstring]
|
||||
else:
|
||||
docstrings.append(initdocstring)
|
||||
|
||||
return [prepare_docstring(force_decode(docstring, encoding))
|
||||
for docstring in docstrings]
|
||||
doc = []
|
||||
for docstring in docstrings:
|
||||
if not isinstance(docstring, unicode):
|
||||
docstring = force_decode(docstring, encoding)
|
||||
doc.append(prepare_docstring(docstring))
|
||||
return doc
|
||||
|
||||
def add_content(self, more_content, no_docstring=False):
|
||||
if self.doc_as_attr:
|
||||
@ -972,7 +982,7 @@ class ExceptionDocumenter(ClassDocumenter):
|
||||
|
||||
@classmethod
|
||||
def can_document_member(cls, member, membername, isattr, parent):
|
||||
return isinstance(member, (type, ClassType)) and \
|
||||
return isinstance(member, class_types) and \
|
||||
issubclass(member, base_exception)
|
||||
|
||||
|
||||
@ -1004,6 +1014,20 @@ class MethodDocumenter(ClassLevelDocumenter):
|
||||
return inspect.isroutine(member) and \
|
||||
not isinstance(parent, ModuleDocumenter)
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
def import_object(self):
|
||||
ret = ClassLevelDocumenter.import_object(self)
|
||||
obj_from_parent = self.parent.__dict__.get(self.object_name)
|
||||
if isinstance(obj_from_parent, classmethod):
|
||||
self.directivetype = 'classmethod'
|
||||
self.member_order = self.member_order - 1
|
||||
elif isinstance(obj_from_parent, staticmethod):
|
||||
self.directivetype = 'staticmethod'
|
||||
self.member_order = self.member_order - 1
|
||||
else:
|
||||
self.directivetype = 'method'
|
||||
return ret
|
||||
else:
|
||||
def import_object(self):
|
||||
ret = ClassLevelDocumenter.import_object(self)
|
||||
if isinstance(self.object, classmethod) or \
|
||||
|
@ -173,8 +173,11 @@ class CoverageBuilder(Builder):

            attrs = []

            for attr_name in dir(obj):
                attr = getattr(obj, attr_name)
            for attr_name, attr in inspect.getmembers(
                    obj, inspect.ismethod):
                    obj, lambda x: inspect.ismethod(x) or \
                                   inspect.isfunction(x)):
                if attr_name[0] == '_':
                    # starts with an underscore, ignore it
                    continue

@ -149,14 +149,14 @@ class TestCode(object):

class SphinxDocTestRunner(doctest.DocTestRunner):
    def summarize(self, out, verbose=None):
        io = StringIO.StringIO()
        string_io = StringIO.StringIO()
        old_stdout = sys.stdout
        sys.stdout = io
        sys.stdout = string_io
        try:
            res = doctest.DocTestRunner.summarize(self, verbose)
        finally:
            sys.stdout = old_stdout
        out(io.getvalue())
        out(string_io.getvalue())
        return res

    def _DocTestRunner__patched_linecache_getlines(self, filename,

@ -93,6 +93,7 @@ def render_dot(self, code, options, format, prefix='graphviz'):
    Render graphviz code into a PNG or PDF output file.
    """
    hashkey = code.encode('utf-8') + str(options) + \
              str(self.builder.config.graphviz_dot) + \
              str(self.builder.config.graphviz_dot_args)
    fname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), format)
    if hasattr(self.builder, 'imgpath'):

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
"""
r"""
    sphinx.ext.inheritance_diagram
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@ -26,6 +26,7 @@

import time
import zlib
import codecs
import urllib2
import posixpath
from os import path
@ -33,19 +34,26 @@ from os import path
from docutils import nodes

from sphinx.builders.html import INVENTORY_FILENAME
from sphinx.util.pycompat import b


handlers = [urllib2.ProxyHandler(), urllib2.HTTPRedirectHandler(),
            urllib2.HTTPHandler()]
if hasattr(urllib2, 'HTTPSHandler'):
try:
    handlers.append(urllib2.HTTPSHandler)
except NameError:
    pass

urllib2.install_opener(urllib2.build_opener(*handlers))

UTF8StreamReader = codecs.lookup('utf-8')[2]


def read_inventory_v1(f, uri, join):
    f = UTF8StreamReader(f)
    invdata = {}
    line = f.next()
    projname = line.rstrip()[11:].decode('utf-8')
    projname = line.rstrip()[11:]
    line = f.next()
    version = line.rstrip()[11:]
    for line in f:
@ -68,25 +76,25 @@ def read_inventory_v2(f, uri, join, bufsize=16*1024):
    projname = line.rstrip()[11:].decode('utf-8')
    line = f.readline()
    version = line.rstrip()[11:].decode('utf-8')
    line = f.readline()
    line = f.readline().decode('utf-8')
    if 'zlib' not in line:
        raise ValueError

    def read_chunks():
        decompressor = zlib.decompressobj()
        for chunk in iter(lambda: f.read(bufsize), ''):
        for chunk in iter(lambda: f.read(bufsize), b('')):
            yield decompressor.decompress(chunk)
        yield decompressor.flush()

    def split_lines(iter):
        buf = ''
        buf = b('')
        for chunk in iter:
            buf += chunk
            lineend = buf.find('\n')
            lineend = buf.find(b('\n'))
            while lineend != -1:
                yield buf[:lineend].decode('utf-8')
                buf = buf[lineend+1:]
                lineend = buf.find('\n')
                lineend = buf.find(b('\n'))
        assert not buf

    for line in split_lines(read_chunks()):
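
For reference, a self-contained sketch of the streaming idiom used by read_inventory_v2 above: decompress the zlib payload in fixed-size chunks and yield complete UTF-8 lines. It uses plain bytes literals rather than the b() helper from sphinx.util.pycompat, and iter_inventory_lines is an illustrative name, not an API added by this commit.

import zlib

def iter_inventory_lines(fileobj, bufsize=16 * 1024):
    # Feed fixed-size chunks through a zlib decompressor and split on newlines.
    decompressor = zlib.decompressobj()
    buf = b''
    for chunk in iter(lambda: fileobj.read(bufsize), b''):
        buf += decompressor.decompress(chunk)
        while b'\n' in buf:
            line, buf = buf.split(b'\n', 1)
            yield line.decode('utf-8')
    buf += decompressor.flush()
    if buf:
        yield buf.decode('utf-8')
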
@ -109,13 +117,13 @@ def fetch_inventory(app, uri, inv):
|
||||
if inv.find('://') != -1:
|
||||
f = urllib2.urlopen(inv)
|
||||
else:
|
||||
f = open(path.join(app.srcdir, inv))
|
||||
f = open(path.join(app.srcdir, inv), 'rb')
|
||||
except Exception, err:
|
||||
app.warn('intersphinx inventory %r not fetchable due to '
|
||||
'%s: %s' % (inv, err.__class__, err))
|
||||
return
|
||||
try:
|
||||
line = f.readline().rstrip()
|
||||
line = f.readline().rstrip().decode('utf-8')
|
||||
try:
|
||||
if line == '# Sphinx inventory version 1':
|
||||
invdata = read_inventory_v1(f, uri, join)
|
||||
@ -191,10 +199,12 @@ def missing_reference(app, env, node, contnode):
|
||||
return
|
||||
objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes]
|
||||
to_try = [(env.intersphinx_inventory, target)]
|
||||
in_set = None
|
||||
if ':' in target:
|
||||
# first part may be the foreign doc set name
|
||||
setname, newtarget = target.split(':', 1)
|
||||
if setname in env.intersphinx_named_inventory:
|
||||
in_set = setname
|
||||
to_try.append((env.intersphinx_named_inventory[setname], newtarget))
|
||||
for inventory, target in to_try:
|
||||
for objtype in objtypes:
|
||||
@ -203,11 +213,25 @@ def missing_reference(app, env, node, contnode):
|
||||
proj, version, uri, dispname = inventory[objtype][target]
|
||||
newnode = nodes.reference('', '', internal=False, refuri=uri,
|
||||
reftitle='(in %s v%s)' % (proj, version))
|
||||
if dispname == '-':
|
||||
if node.get('refexplicit'):
|
||||
# use whatever title was given
|
||||
newnode.append(contnode)
|
||||
elif dispname == '-':
|
||||
# use whatever title was given, but strip prefix
|
||||
title = contnode.astext()
|
||||
if in_set and title.startswith(in_set+':'):
|
||||
newnode.append(contnode.__class__(title[len(in_set)+1:],
|
||||
title[len(in_set)+1:]))
|
||||
else:
|
||||
newnode.append(contnode)
|
||||
else:
|
||||
# else use the given display name (used for :ref:)
|
||||
newnode.append(contnode.__class__(dispname, dispname))
|
||||
return newnode
|
||||
# at least get rid of the ':' in the target if no explicit title given
|
||||
if in_set is not None and not node.get('refexplicit', True):
|
||||
if len(contnode) and isinstance(contnode[0], nodes.Text):
|
||||
contnode[0] = nodes.Text(newtarget, contnode[0].rawsource)
|
||||
|
||||
|
||||
def setup(app):
|
||||
|
@ -13,6 +13,10 @@ from docutils.parsers.rst import directives

from sphinx.util.compat import Directive

_warned_oldcmarkup = False
WARNING_MSG = 'using old C markup; please migrate to new-style markup ' \
'(e.g. c:function instead of cfunction), see ' \
'http://sphinx.pocoo.org/domains.html'

class OldCDirective(Directive):
has_content = True
@ -26,6 +30,10 @@ class OldCDirective(Directive):

def run(self):
env = self.state.document.settings.env
if not env.app._oldcmarkup_warned:
print 'XXXYYY'
env.warn(env.docname, WARNING_MSG, self.lineno)
env.app._oldcmarkup_warned = True
newname = 'c:' + self.name[1:]
newdir = env.lookup_domain_element('directive', newname)[0]
return newdir(newname, self.arguments, self.options,
@ -35,12 +43,18 @@ class OldCDirective(Directive):

def old_crole(typ, rawtext, text, lineno, inliner, options={}, content=[]):
env = inliner.document.settings.env
if not typ:
typ = env.config.default_role
if not env.app._oldcmarkup_warned:
env.warn(env.docname, WARNING_MSG)
env.app._oldcmarkup_warned = True
newtyp = 'c:' + typ[1:]
newrole = env.lookup_domain_element('role', newtyp)[0]
return newrole(newtyp, rawtext, text, lineno, inliner, options, content)


def setup(app):
app._oldcmarkup_warned = False
app.add_directive('cfunction', OldCDirective)
app.add_directive('cmember', OldCDirective)
app.add_directive('cmacro', OldCDirective)
@ -50,3 +64,4 @@ def setup(app):
app.add_role('cfunc', old_crole)
app.add_role('cmacro', old_crole)
app.add_role('ctype', old_crole)
app.add_role('cmember', old_crole)
@ -31,7 +31,11 @@ def doctree_read(app, doctree):
env._viewcode_modules[modname] = False
return
analyzer.find_tags()
entry = analyzer.code.decode(analyzer.encoding), analyzer.tags, {}
if not isinstance(analyzer.code, unicode):
code = analyzer.code.decode(analyzer.encoding)
else:
code = analyzer.code
entry = code, analyzer.tags, {}
env._viewcode_modules[modname] = entry
elif entry is False:
return
@ -156,7 +156,7 @@ class PygmentsBridge(object):
if sys.version_info >= (2, 5):
src = 'from __future__ import with_statement\n' + src

if isinstance(src, unicode):
if sys.version_info < (3, 0) and isinstance(src, unicode):
# Non-ASCII chars will only occur in string literals
# and comments. If we wanted to give them to the parser
# correctly, we'd have to find out the correct source
@ -175,7 +175,7 @@ class PygmentsBridge(object):
return True

def highlight_block(self, source, lang, linenos=False, warn=None):
if isinstance(source, str):
if not isinstance(source, unicode):
source = source.decode()
if not pygments:
return self.unhighlighted(source)
@ -240,7 +240,7 @@ class PygmentsBridge(object):
# no HTML styles needed
return ''
if self.dest == 'html':
return self.fmter[0].get_style_defs()
return self.fmter[0].get_style_defs('.highlight')
else:
styledefs = self.fmter[0].get_style_defs()
# workaround for Pygments < 0.12
@ -8,6 +8,8 @@
:copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

import sys
import gettext
import UserString

@ -178,9 +180,14 @@ pairindextypes = {

translators = {}

if sys.version_info >= (3, 0):
def _(message):
return translators['sphinx'].gettext(message)
else:
def _(message):
return translators['sphinx'].ugettext(message)


def init(locale_dirs, language, catalog='sphinx'):
"""
Look for message catalogs in `locale_dirs` and *ensure* that there is at
1
sphinx/locale/bn/LC_MESSAGES/sphinx.js
Normal file
@ -0,0 +1 @@
Documentation.addTranslations({"locale": "bn", "plural_expr": "(n != 1)", "messages": {"Search Results": "\u0985\u09a8\u09c1\u09b8\u09a8\u09cd\u09a7\u09be\u09a8\u09c7\u09b0 \u09ab\u09b2\u09be\u09ab\u09b2", "Preparing search...": "\u0985\u09a8\u09c1\u09b8\u09a8\u09cd\u09a7\u09be\u09a8\u09c7\u09b0 \u09aa\u09cd\u09b0\u09b8\u09cd\u09a4\u09c1\u09a4\u09bf \u099a\u09b2\u099b\u09c7...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "\u0986\u09aa\u09a8\u09be\u09b0 \u0985\u09a8\u09c1\u09b8\u09a8\u09cd\u09a7\u09be\u09a8\u09c7 \u0995\u09c7\u09be\u09a8 \u09ab\u09b2\u09be\u09ab\u09b2 \u09aa\u09be\u0993\u09df\u09be \u09af\u09be\u09df\u09a8\u09bf\u0964 \u0986\u09aa\u09a8\u09be\u09b0 \u0985\u09a8\u09c1\u09b8\u09a8\u09cd\u09a7\u09be\u09a8\u09c7\u09b0 \u09b6\u09ac\u09cd\u09a6\u0997\u09c1\u09b2\u09c7\u09be\u09b0 \u09b8\u09a0\u09bf\u0995 \u09ac\u09be\u09a8\u09be\u09a8 \u0993 \u09ac\u09bf\u09ad\u09be\u0997 \u09a8\u09bf\u09b0\u09cd\u09ac\u09be\u099a\u09a8 \u09a8\u09bf\u09b6\u09cd\u099a\u09bf\u09a4 \u0995\u09b0\u09c1\u09a8\u0964", "Search finished, found %s page(s) matching the search query.": "\u0985\u09a8\u09c1\u09b8\u09a8\u09cd\u09a7\u09be\u09a8 \u09b6\u09c7\u09b7 \u09b9\u09df\u09c7\u099b\u09c7, \u09ab\u09b2\u09be\u09ab\u09b2\u09c7 %s-\u099f\u09bf \u09aa\u09be\u09a4\u09be \u09aa\u09be\u0993\u09df\u09be \u0997\u09c7\u099b\u09c7\u0964", ", in ": ", -", "Permalink to this headline": "\u098f\u0987 \u09b6\u09bf\u09b0\u09c7\u09be\u09a8\u09be\u09ae\u09c7\u09b0 \u09aa\u09be\u09b0\u09cd\u09ae\u09be\u09b2\u09bf\u0999\u09cd\u0995", "Searching": "\u0985\u09a8\u09c1\u09b8\u09a8\u09cd\u09a7\u09be\u09a8 \u099a\u09b2\u099b\u09c7", "Permalink to this definition": "\u098f\u0987 \u09b8\u0982\u099c\u09cd\u099e\u09be\u09b0 \u09aa\u09be\u09b0\u09cd\u09ae\u09be\u09b2\u09bf\u0999\u09cd\u0995", "Hide Search Matches": "\u0985\u09a8\u09c1\u09b8\u09a8\u09cd\u09a7\u09be\u09a8\u09c7\u09b0 \u09ae\u09cd\u09af\u09be\u099a\u0997\u09c1\u09b2\u09c7\u09be \u09b2\u09c1\u0995\u09be\u09a8"}});
BIN
sphinx/locale/bn/LC_MESSAGES/sphinx.mo
Normal file
Binary file not shown.
698
sphinx/locale/bn/LC_MESSAGES/sphinx.po
Normal file
@ -0,0 +1,698 @@
# Translations template for Sphinx.
|
||||
# Copyright (C) 2009 ORGANIZATION
|
||||
# This file is distributed under the same license as the Sphinx project.
|
||||
# FIRST AUTHOR <EMAIL@ADDRESS>, 2009.
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: Sphinx 1.0pre/[?1034h2e1ab15e035e\n"
|
||||
"Report-Msgid-Bugs-To: nasim.haque@gmail.com\n"
|
||||
"POT-Creation-Date: 2009-11-08 16:28+0100\n"
|
||||
"PO-Revision-Date: 2009-11-10 13:42+0100\n"
|
||||
"Last-Translator: Nasimul Haque <nasim.haque@gmail.com>\n"
|
||||
"Language-Team: Nasimul Haque <nasim.haque@gmail.com>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=utf-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Generated-By: Babel 0.9.4\n"
|
||||
|
||||
#: sphinx/environment.py:130
|
||||
#: sphinx/writers/latex.py:184
|
||||
#, python-format
|
||||
msgid "%B %d, %Y"
|
||||
msgstr "%B %d, %Y"
|
||||
|
||||
#: sphinx/environment.py:348
|
||||
#: sphinx/themes/basic/genindex-single.html:2
|
||||
#: sphinx/themes/basic/genindex-split.html:2
|
||||
#: sphinx/themes/basic/genindex-split.html:5
|
||||
#: sphinx/themes/basic/genindex.html:2
|
||||
#: sphinx/themes/basic/genindex.html:5
|
||||
#: sphinx/themes/basic/genindex.html:48
|
||||
#: sphinx/themes/basic/layout.html:134
|
||||
#: sphinx/writers/latex.py:190
|
||||
msgid "Index"
|
||||
msgstr "ইনডেক্স"
|
||||
|
||||
#: sphinx/environment.py:349
|
||||
#: sphinx/writers/latex.py:189
|
||||
msgid "Module Index"
|
||||
msgstr "মডিউল ইনডেক্স"
|
||||
|
||||
#: sphinx/environment.py:350
|
||||
#: sphinx/themes/basic/defindex.html:16
|
||||
msgid "Search Page"
|
||||
msgstr "অনুসন্ধান পাতা"
|
||||
|
||||
#: sphinx/roles.py:167
|
||||
#, python-format
|
||||
msgid "Python Enhancement Proposals!PEP %s"
|
||||
msgstr "পাইথন উন্নয়ন পরামর্শ!PEP %s"
|
||||
|
||||
#: sphinx/builders/changes.py:70
|
||||
msgid "Builtins"
|
||||
msgstr "বিল্টইন সমূহ"
|
||||
|
||||
#: sphinx/builders/changes.py:72
|
||||
msgid "Module level"
|
||||
msgstr "মডিউল লেভেল"
|
||||
|
||||
#: sphinx/builders/html.py:224
|
||||
#, python-format
|
||||
msgid "%b %d, %Y"
|
||||
msgstr "%b %d, %Y"
|
||||
|
||||
#: sphinx/builders/html.py:243
|
||||
#: sphinx/themes/basic/defindex.html:21
|
||||
msgid "General Index"
|
||||
msgstr "সাধারণ ইনডেক্স"
|
||||
|
||||
#: sphinx/builders/html.py:243
|
||||
msgid "index"
|
||||
msgstr "ইনডেক্স"
|
||||
|
||||
#: sphinx/builders/html.py:247
|
||||
#: sphinx/builders/htmlhelp.py:220
|
||||
#: sphinx/builders/qthelp.py:133
|
||||
#: sphinx/themes/basic/defindex.html:19
|
||||
#: sphinx/themes/basic/modindex.html:2
|
||||
#: sphinx/themes/basic/modindex.html:13
|
||||
#: sphinx/themes/scrolls/modindex.html:2
|
||||
#: sphinx/themes/scrolls/modindex.html:13
|
||||
msgid "Global Module Index"
|
||||
msgstr "গ্লোবাল মডিউল ইনডেক্স"
|
||||
|
||||
#: sphinx/builders/html.py:248
|
||||
msgid "modules"
|
||||
msgstr "মডিউল সমূহ"
|
||||
|
||||
#: sphinx/builders/html.py:304
|
||||
msgid "next"
|
||||
msgstr "পরবর্তী"
|
||||
|
||||
#: sphinx/builders/html.py:313
|
||||
msgid "previous"
|
||||
msgstr "পূর্ববর্তী"
|
||||
|
||||
#: sphinx/builders/latex.py:162
|
||||
msgid " (in "
|
||||
msgstr "(-"
|
||||
|
||||
#: sphinx/directives/__init__.py:78
|
||||
#: sphinx/directives/__init__.py:79
|
||||
#: sphinx/directives/__init__.py:80
|
||||
#: sphinx/directives/__init__.py:81
|
||||
msgid "Raises"
|
||||
msgstr "রেইজেস"
|
||||
|
||||
#: sphinx/directives/__init__.py:82
|
||||
#: sphinx/directives/__init__.py:83
|
||||
#: sphinx/directives/__init__.py:84
|
||||
msgid "Variable"
|
||||
msgstr "ভ্যারিয়েবল"
|
||||
|
||||
#: sphinx/directives/__init__.py:85
|
||||
#: sphinx/directives/__init__.py:86
|
||||
#: sphinx/directives/__init__.py:92
|
||||
#: sphinx/directives/__init__.py:93
|
||||
msgid "Returns"
|
||||
msgstr "রিটার্নস"
|
||||
|
||||
#: sphinx/directives/__init__.py:94
|
||||
msgid "Return type"
|
||||
msgstr "রিটার্ন টাইপ"
|
||||
|
||||
#: sphinx/directives/__init__.py:169
|
||||
msgid "Parameter"
|
||||
msgstr "প্যারামিটার"
|
||||
|
||||
#: sphinx/directives/__init__.py:173
|
||||
msgid "Parameters"
|
||||
msgstr "প্যারামিটার"
|
||||
|
||||
#: sphinx/directives/other.py:127
|
||||
msgid "Section author: "
|
||||
msgstr "অনুচ্ছেদ লেখক:"
|
||||
|
||||
#: sphinx/directives/other.py:129
|
||||
msgid "Module author: "
|
||||
msgstr "মডিউল লেখক:"
|
||||
|
||||
#: sphinx/directives/other.py:131
|
||||
msgid "Author: "
|
||||
msgstr "লেখক:"
|
||||
|
||||
#: sphinx/directives/other.py:233
|
||||
msgid "See also"
|
||||
msgstr "আরও দেখুন"
|
||||
|
||||
#: sphinx/domains/c.py:124
|
||||
#, python-format
|
||||
msgid "%s (C function)"
|
||||
msgstr "%s (C ফাংশন)"
|
||||
|
||||
#: sphinx/domains/c.py:126
|
||||
#, python-format
|
||||
msgid "%s (C member)"
|
||||
msgstr "%s (C মেম্বার)"
|
||||
|
||||
#: sphinx/domains/c.py:128
|
||||
#, python-format
|
||||
msgid "%s (C macro)"
|
||||
msgstr "%s (C ম্যাক্রো)"
|
||||
|
||||
#: sphinx/domains/c.py:130
|
||||
#, python-format
|
||||
msgid "%s (C type)"
|
||||
msgstr "%s (C টাইপ)"
|
||||
|
||||
#: sphinx/domains/c.py:132
|
||||
#, python-format
|
||||
msgid "%s (C variable)"
|
||||
msgstr "%s (C ভ্যারিয়েবল)"
|
||||
|
||||
#: sphinx/domains/c.py:162
|
||||
msgid "C function"
|
||||
msgstr "C ফাংশন"
|
||||
|
||||
#: sphinx/domains/c.py:163
|
||||
msgid "C member"
|
||||
msgstr "C মেম্বার"
|
||||
|
||||
#: sphinx/domains/c.py:164
|
||||
msgid "C macro"
|
||||
msgstr "C ম্যাক্রো"
|
||||
|
||||
#: sphinx/domains/c.py:165
|
||||
msgid "C type"
|
||||
msgstr "C টাইপ"
|
||||
|
||||
#: sphinx/domains/c.py:166
|
||||
msgid "C variable"
|
||||
msgstr "C ভ্যারিয়েবল"
|
||||
|
||||
#: sphinx/domains/python.py:186
|
||||
#, python-format
|
||||
msgid "%s() (built-in function)"
|
||||
msgstr "%s() (বিল্ট-ইন ফাংশন)"
|
||||
|
||||
#: sphinx/domains/python.py:187
|
||||
#: sphinx/domains/python.py:244
|
||||
#: sphinx/domains/python.py:256
|
||||
#: sphinx/domains/python.py:269
|
||||
#, python-format
|
||||
msgid "%s() (in module %s)"
|
||||
msgstr "%s() (%s মডিউলে)"
|
||||
|
||||
#: sphinx/domains/python.py:190
|
||||
#, python-format
|
||||
msgid "%s (built-in variable)"
|
||||
msgstr "%s (বিল্ট-ইন ভ্যারিয়েবল)"
|
||||
|
||||
#: sphinx/domains/python.py:191
|
||||
#: sphinx/domains/python.py:282
|
||||
#, python-format
|
||||
msgid "%s (in module %s)"
|
||||
msgstr "%s (%s মডিউলে)"
|
||||
|
||||
#: sphinx/domains/python.py:207
|
||||
#, python-format
|
||||
msgid "%s (built-in class)"
|
||||
msgstr "%s (বিল্ট-ইন ক্লাস)"
|
||||
|
||||
#: sphinx/domains/python.py:208
|
||||
#, python-format
|
||||
msgid "%s (class in %s)"
|
||||
msgstr "%s (%s ক্লাসে)"
|
||||
|
||||
#: sphinx/domains/python.py:248
|
||||
#, python-format
|
||||
msgid "%s() (%s.%s method)"
|
||||
msgstr "%s (%s.%s মেথড)"
|
||||
|
||||
#: sphinx/domains/python.py:250
|
||||
#, python-format
|
||||
msgid "%s() (%s method)"
|
||||
msgstr "%s() (%s মেথড)"
|
||||
|
||||
#: sphinx/domains/python.py:260
|
||||
#, python-format
|
||||
msgid "%s() (%s.%s static method)"
|
||||
msgstr "%s (%s.%s স্ট্যাটিক মেথড)"
|
||||
|
||||
#: sphinx/domains/python.py:263
|
||||
#, python-format
|
||||
msgid "%s() (%s static method)"
|
||||
msgstr "%s() (%s স্ট্যাটিক মেথড)"
|
||||
|
||||
#: sphinx/domains/python.py:273
|
||||
#, python-format
|
||||
msgid "%s() (%s.%s class method)"
|
||||
msgstr "%s() (%s.%s ক্লাস মেথড)"
|
||||
|
||||
#: sphinx/domains/python.py:276
|
||||
#, python-format
|
||||
msgid "%s() (%s class method)"
|
||||
msgstr "%s() (%s ক্লাস মেথড)"
|
||||
|
||||
#: sphinx/domains/python.py:286
|
||||
#, python-format
|
||||
msgid "%s (%s.%s attribute)"
|
||||
msgstr "%s (%s.%s এ্যট্রিবিউট)"
|
||||
|
||||
#: sphinx/domains/python.py:288
|
||||
#, python-format
|
||||
msgid "%s (%s attribute)"
|
||||
msgstr "%s (%s এ্যট্রিবিউট)"
|
||||
|
||||
#: sphinx/domains/python.py:334
|
||||
msgid "Platforms: "
|
||||
msgstr "প্লাটফরম:"
|
||||
|
||||
#: sphinx/domains/python.py:340
|
||||
#, python-format
|
||||
msgid "%s (module)"
|
||||
msgstr "%s (মডিউল)"
|
||||
|
||||
#: sphinx/domains/python.py:396
|
||||
msgid "function"
|
||||
msgstr "ফাংশন"
|
||||
|
||||
#: sphinx/domains/python.py:397
|
||||
msgid "data"
|
||||
msgstr "ডাটা"
|
||||
|
||||
#: sphinx/domains/python.py:398
|
||||
msgid "class"
|
||||
msgstr "ক্লাস"
|
||||
|
||||
#: sphinx/domains/python.py:399
|
||||
#: sphinx/locale/__init__.py:161
|
||||
msgid "exception"
|
||||
msgstr "এক্সেপশন"
|
||||
|
||||
#: sphinx/domains/python.py:400
|
||||
msgid "method"
|
||||
msgstr "মেথড"
|
||||
|
||||
#: sphinx/domains/python.py:401
|
||||
msgid "attribute"
|
||||
msgstr "এ্যট্রিবিউট"
|
||||
|
||||
#: sphinx/domains/python.py:402
|
||||
#: sphinx/locale/__init__.py:157
|
||||
msgid "module"
|
||||
msgstr "মডিউল"
|
||||
|
||||
#: sphinx/domains/std.py:67
|
||||
#: sphinx/domains/std.py:83
|
||||
#, python-format
|
||||
msgid "environment variable; %s"
|
||||
msgstr "এনভায়রনমেন্ট ভ্যারিয়েবল; %s"
|
||||
|
||||
#: sphinx/domains/std.py:156
|
||||
#, python-format
|
||||
msgid "%scommand line option; %s"
|
||||
msgstr "%sকমান্ড লাইন অপশন; %s"
|
||||
|
||||
#: sphinx/domains/std.py:324
|
||||
msgid "glossary term"
|
||||
msgstr "শব্দকোষ"
|
||||
|
||||
#: sphinx/domains/std.py:325
|
||||
msgid "grammar token"
|
||||
msgstr "ব্যকরণ টোকেন"
|
||||
|
||||
#: sphinx/domains/std.py:326
|
||||
msgid "environment variable"
|
||||
msgstr "এনভায়রনমেন্ট ভ্যারিয়েবল"
|
||||
|
||||
#: sphinx/domains/std.py:327
|
||||
msgid "program option"
|
||||
msgstr "প্রোগ্রাম অপশন"
|
||||
|
||||
#: sphinx/ext/autodoc.py:892
|
||||
#, python-format
|
||||
msgid " Bases: %s"
|
||||
msgstr "বেস: %s"
|
||||
|
||||
#: sphinx/ext/autodoc.py:925
|
||||
#, python-format
|
||||
msgid "alias of :class:`%s`"
|
||||
msgstr ":class:`%s` এর উপনাম"
|
||||
|
||||
#: sphinx/ext/todo.py:40
|
||||
msgid "Todo"
|
||||
msgstr "অসমাপ্ত কাজ"
|
||||
|
||||
#: sphinx/ext/todo.py:98
|
||||
#, python-format
|
||||
msgid "(The original entry is located in %s, line %d and can be found "
|
||||
msgstr "(%s, %d লাইনে মূল অন্তর্ভুক্তিটি রয়েছে, যা পাওয়া যাবে"
|
||||
|
||||
#: sphinx/ext/todo.py:104
|
||||
msgid "here"
|
||||
msgstr "এখানে"
|
||||
|
||||
#: sphinx/locale/__init__.py:138
|
||||
msgid "Attention"
|
||||
msgstr "দৃষ্টি আকর্ষণ"
|
||||
|
||||
#: sphinx/locale/__init__.py:139
|
||||
msgid "Caution"
|
||||
msgstr "সতর্কীকরণ"
|
||||
|
||||
#: sphinx/locale/__init__.py:140
|
||||
msgid "Danger"
|
||||
msgstr "বিপজ্জনক"
|
||||
|
||||
#: sphinx/locale/__init__.py:141
|
||||
msgid "Error"
|
||||
msgstr "ভুল (এরর)"
|
||||
|
||||
#: sphinx/locale/__init__.py:142
|
||||
msgid "Hint"
|
||||
msgstr "আভাস"
|
||||
|
||||
#: sphinx/locale/__init__.py:143
|
||||
msgid "Important"
|
||||
msgstr "গুরুত্বপূর্ণ"
|
||||
|
||||
#: sphinx/locale/__init__.py:144
|
||||
msgid "Note"
|
||||
msgstr "নোট"
|
||||
|
||||
#: sphinx/locale/__init__.py:145
|
||||
msgid "See Also"
|
||||
msgstr "আরও দেখুন"
|
||||
|
||||
#: sphinx/locale/__init__.py:146
|
||||
msgid "Tip"
|
||||
msgstr "পরামর্শ"
|
||||
|
||||
#: sphinx/locale/__init__.py:147
|
||||
msgid "Warning"
|
||||
msgstr "সতর্কতা"
|
||||
|
||||
#: sphinx/locale/__init__.py:151
|
||||
#, python-format
|
||||
msgid "New in version %s"
|
||||
msgstr "%s ভার্সনে নতুন"
|
||||
|
||||
#: sphinx/locale/__init__.py:152
|
||||
#, python-format
|
||||
msgid "Changed in version %s"
|
||||
msgstr "%s ভার্সনে পরিবর্তিত"
|
||||
|
||||
#: sphinx/locale/__init__.py:153
|
||||
#, python-format
|
||||
msgid "Deprecated since version %s"
|
||||
msgstr "%s ভার্সন থেকে ডেপ্রিকেটেড"
|
||||
|
||||
#: sphinx/locale/__init__.py:158
|
||||
msgid "keyword"
|
||||
msgstr "কিওয়ার্ড"
|
||||
|
||||
#: sphinx/locale/__init__.py:159
|
||||
msgid "operator"
|
||||
msgstr "অপারেটর"
|
||||
|
||||
#: sphinx/locale/__init__.py:160
|
||||
msgid "object"
|
||||
msgstr "অবজেক্ট"
|
||||
|
||||
#: sphinx/locale/__init__.py:162
|
||||
msgid "statement"
|
||||
msgstr "স্ট্যাটমেন্ট"
|
||||
|
||||
#: sphinx/locale/__init__.py:163
|
||||
msgid "built-in function"
|
||||
msgstr "বিল্ট-ইন ফাংশন"
|
||||
|
||||
#: sphinx/themes/basic/defindex.html:2
|
||||
msgid "Overview"
|
||||
msgstr "ভুমিকা"
|
||||
|
||||
#: sphinx/themes/basic/defindex.html:11
|
||||
msgid "Indices and tables:"
|
||||
msgstr "ইনডেক্স ও টেবিল সমূহ:"
|
||||
|
||||
#: sphinx/themes/basic/defindex.html:14
|
||||
msgid "Complete Table of Contents"
|
||||
msgstr "পূর্ণাঙ্গ সূচীপত্র"
|
||||
|
||||
#: sphinx/themes/basic/defindex.html:15
|
||||
msgid "lists all sections and subsections"
|
||||
msgstr "সকল অনুচ্ছেদ সমূহের তালিকা"
|
||||
|
||||
#: sphinx/themes/basic/defindex.html:17
|
||||
msgid "search this documentation"
|
||||
msgstr "এই সহায়িকাতে অনুসন্ধা করুন"
|
||||
|
||||
#: sphinx/themes/basic/defindex.html:20
|
||||
msgid "quick access to all modules"
|
||||
msgstr "সকল মডিউলে দ্রুত প্রবেশ"
|
||||
|
||||
#: sphinx/themes/basic/defindex.html:22
|
||||
msgid "all functions, classes, terms"
|
||||
msgstr "সকল ফাংশন, ক্লাস, টার্ম"
|
||||
|
||||
#: sphinx/themes/basic/genindex-single.html:5
|
||||
#, python-format
|
||||
msgid "Index – %(key)s"
|
||||
msgstr "ইনডেক্স – %(key)s"
|
||||
|
||||
#: sphinx/themes/basic/genindex-single.html:44
|
||||
#: sphinx/themes/basic/genindex-split.html:14
|
||||
#: sphinx/themes/basic/genindex-split.html:27
|
||||
#: sphinx/themes/basic/genindex.html:54
|
||||
msgid "Full index on one page"
|
||||
msgstr "এক পাতায় সম্পূর্ণ ইনডেক্স"
|
||||
|
||||
#: sphinx/themes/basic/genindex-split.html:7
|
||||
msgid "Index pages by letter"
|
||||
msgstr "বর্ণানুসারে ইনডেক্স পাতা"
|
||||
|
||||
#: sphinx/themes/basic/genindex-split.html:15
|
||||
msgid "can be huge"
|
||||
msgstr "খুব বড় হতে পারে"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:10
|
||||
msgid "Navigation"
|
||||
msgstr "নেভিগেশন"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:42
|
||||
msgid "Table Of Contents"
|
||||
msgstr "সূচীপত্র"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:48
|
||||
msgid "Previous topic"
|
||||
msgstr "পূর্ববর্তী টপিক"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:50
|
||||
msgid "previous chapter"
|
||||
msgstr "পূর্ববর্তী অধ্যায়"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:53
|
||||
msgid "Next topic"
|
||||
msgstr "পরবর্তী টপিক"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:55
|
||||
msgid "next chapter"
|
||||
msgstr "পরবর্তী অধ্যায়"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:60
|
||||
msgid "This Page"
|
||||
msgstr "এই পাতা"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:63
|
||||
msgid "Show Source"
|
||||
msgstr "সোর্স দেখুন"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:73
|
||||
msgid "Quick search"
|
||||
msgstr "দ্রুত অনুসন্ধান"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:76
|
||||
msgid "Go"
|
||||
msgstr "যান"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:81
|
||||
msgid "Enter search terms or a module, class or function name."
|
||||
msgstr "অনুসন্ধানের জন্য টার্ম, মডিউল, ক্লাস অথবা ফাংশনের নাম দিন।"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:122
|
||||
#, python-format
|
||||
msgid "Search within %(docstitle)s"
|
||||
msgstr "%(docstitle)s এর মধ্যে খুঁজুন"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:131
|
||||
msgid "About these documents"
|
||||
msgstr "এই ডকুমেন্ট সম্পর্কে"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:137
|
||||
#: sphinx/themes/basic/search.html:2
|
||||
#: sphinx/themes/basic/search.html:5
|
||||
msgid "Search"
|
||||
msgstr "অনুসন্ধান"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:140
|
||||
msgid "Copyright"
|
||||
msgstr "কপিরাইট"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:187
|
||||
#: sphinx/themes/scrolls/layout.html:83
|
||||
#, python-format
|
||||
msgid "© <a href=\"%(path)s\">Copyright</a> %(copyright)s."
|
||||
msgstr "© <a href=\"%(path)s\">কপিরাইট</a> %(copyright)s."
|
||||
|
||||
#: sphinx/themes/basic/layout.html:189
|
||||
#: sphinx/themes/scrolls/layout.html:85
|
||||
#, python-format
|
||||
msgid "© Copyright %(copyright)s."
|
||||
msgstr "© কপিরাইট %(copyright)s."
|
||||
|
||||
#: sphinx/themes/basic/layout.html:193
|
||||
#: sphinx/themes/scrolls/layout.html:89
|
||||
#, python-format
|
||||
msgid "Last updated on %(last_updated)s."
|
||||
msgstr "%(last_updated)s সর্বশেষ পরিবর্তন করা হয়েছে।"
|
||||
|
||||
#: sphinx/themes/basic/layout.html:196
|
||||
#: sphinx/themes/scrolls/layout.html:92
|
||||
#, python-format
|
||||
msgid "Created using <a href=\"http://sphinx.pocoo.org/\">Sphinx</a> %(sphinx_version)s."
|
||||
msgstr "<a href=\"http://sphinx.pocoo.org/\">Sphinx</a> %(sphinx_version)s দিয়ে তৈরী।"
|
||||
|
||||
#: sphinx/themes/basic/modindex.html:36
|
||||
#: sphinx/themes/scrolls/modindex.html:37
|
||||
msgid "Deprecated"
|
||||
msgstr "ডেপ্রিকেটেড"
|
||||
|
||||
#: sphinx/themes/basic/opensearch.xml:4
|
||||
#, python-format
|
||||
msgid "Search %(docstitle)s"
|
||||
msgstr "%(docstitle)s-এ খুঁজুন"
|
||||
|
||||
#: sphinx/themes/basic/search.html:9
|
||||
msgid ""
|
||||
"Please activate JavaScript to enable the search\n"
|
||||
" functionality."
|
||||
msgstr ""
|
||||
"অনুসন্ধান করার জন্য অনুগ্রহপূর্বক জাভাস্ক্রিপ্ট \n"
|
||||
" সক্রিয় করুন।"
|
||||
|
||||
#: sphinx/themes/basic/search.html:14
|
||||
msgid ""
|
||||
"From here you can search these documents. Enter your search\n"
|
||||
" words into the box below and click \"search\". Note that the search\n"
|
||||
" function will automatically search for all of the words. Pages\n"
|
||||
" containing fewer words won't appear in the result list."
|
||||
msgstr ""
|
||||
"এখান থেকে এই নথিগুলোতে আপনি অনুসন্ধান করতে পারবেন। \n"
|
||||
" আপনার কাঙ্ক্ষিত শব্দসমূহ নিচের বাক্সে লিখুন এবং \"অনুসন্ধান\" বাটনে ক্লিক করুন।\n"
|
||||
" উল্লেখ্য, সকল শব্দসমূহের উপস্থিতি নিয়ে অনুসন্ধান করা হবে। যেসব পাতায় সকল\n"
|
||||
" শব্দ নেই সেগুলো বাদ দেয়া হবে।"
|
||||
|
||||
#: sphinx/themes/basic/search.html:21
|
||||
msgid "search"
|
||||
msgstr "খুঁজুন"
|
||||
|
||||
#: sphinx/themes/basic/search.html:25
|
||||
#: sphinx/themes/basic/static/searchtools.js:473
|
||||
msgid "Search Results"
|
||||
msgstr "অনুসন্ধানের ফলাফল"
|
||||
|
||||
#: sphinx/themes/basic/search.html:27
|
||||
msgid "Your search did not match any results."
|
||||
msgstr "আপনার অনুসন্ধানে কোন ফলাফল পাওয়া যায়নি।"
|
||||
|
||||
#: sphinx/themes/basic/changes/frameset.html:5
|
||||
#: sphinx/themes/basic/changes/versionchanges.html:12
|
||||
#, python-format
|
||||
msgid "Changes in Version %(version)s — %(docstitle)s"
|
||||
msgstr "%(version)s — %(docstitle)s-এ পরিবর্তন সমূহ"
|
||||
|
||||
#: sphinx/themes/basic/changes/rstsource.html:5
|
||||
#, python-format
|
||||
msgid "%(filename)s — %(docstitle)s"
|
||||
msgstr "%(filename)s — %(docstitle)s"
|
||||
|
||||
#: sphinx/themes/basic/changes/versionchanges.html:17
|
||||
#, python-format
|
||||
msgid "Automatically generated list of changes in version %(version)s"
|
||||
msgstr "স্বয়ংক্রিয়ভাবে তৈরী %(version)s-এ পরিবর্তন সমূহের তালিকা।"
|
||||
|
||||
#: sphinx/themes/basic/changes/versionchanges.html:18
|
||||
msgid "Library changes"
|
||||
msgstr "লাইব্রেরির পরিবর্তন"
|
||||
|
||||
#: sphinx/themes/basic/changes/versionchanges.html:23
|
||||
msgid "C API changes"
|
||||
msgstr "C API পরিবর্তন"
|
||||
|
||||
#: sphinx/themes/basic/changes/versionchanges.html:25
|
||||
msgid "Other changes"
|
||||
msgstr "অন্যান্য পরিবর্তন"
|
||||
|
||||
#: sphinx/themes/basic/static/doctools.js:138
|
||||
#: sphinx/writers/html.py:462
|
||||
#: sphinx/writers/html.py:467
|
||||
msgid "Permalink to this headline"
|
||||
msgstr "এই শিরোনামের পার্মালিঙ্ক"
|
||||
|
||||
#: sphinx/themes/basic/static/doctools.js:144
|
||||
#: sphinx/writers/html.py:80
|
||||
msgid "Permalink to this definition"
|
||||
msgstr "এই সংজ্ঞার পার্মালিঙ্ক"
|
||||
|
||||
#: sphinx/themes/basic/static/doctools.js:173
|
||||
msgid "Hide Search Matches"
|
||||
msgstr "অনুসন্ধানের ম্যাচগুলো লুকান"
|
||||
|
||||
#: sphinx/themes/basic/static/searchtools.js:274
|
||||
msgid "Searching"
|
||||
msgstr "অনুসন্ধান চলছে"
|
||||
|
||||
#: sphinx/themes/basic/static/searchtools.js:279
|
||||
msgid "Preparing search..."
|
||||
msgstr "অনুসন্ধানের প্রস্তুতি চলছে..."
|
||||
|
||||
#: sphinx/themes/basic/static/searchtools.js:352
|
||||
msgid ", in "
|
||||
msgstr ", -"
|
||||
|
||||
#: sphinx/themes/basic/static/searchtools.js:475
|
||||
msgid "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
|
||||
msgstr "আপনার অনুসন্ধানে কোন ফলাফল পাওয়া যায়নি। আপনার অনুসন্ধানের শব্দগুলোর সঠিক বানান ও বিভাগ নির্বাচন নিশ্চিত করুন।"
|
||||
|
||||
#: sphinx/themes/basic/static/searchtools.js:477
|
||||
#, python-format
|
||||
msgid "Search finished, found %s page(s) matching the search query."
|
||||
msgstr "অনুসন্ধান শেষ হয়েছে, ফলাফলে %s-টি পাতা পাওয়া গেছে।"
|
||||
|
||||
#: sphinx/writers/latex.py:187
|
||||
msgid "Release"
|
||||
msgstr "রিলিজ"
|
||||
|
||||
#: sphinx/writers/latex.py:579
|
||||
msgid "Footnotes"
|
||||
msgstr "পাদটীকা"
|
||||
|
||||
#: sphinx/writers/latex.py:647
|
||||
msgid "continued from previous page"
|
||||
msgstr "পূর্ববর্তী পাতা হতে চলমান"
|
||||
|
||||
#: sphinx/writers/latex.py:652
|
||||
msgid "Continued on next page"
|
||||
msgstr "পরবর্তী পাতাতে চলমান"
|
||||
|
||||
#: sphinx/writers/text.py:166
|
||||
#, python-format
|
||||
msgid "Platform: %s"
|
||||
msgstr "প্লাটফরম: %s"
|
||||
|
||||
#: sphinx/writers/text.py:428
|
||||
msgid "[image]"
|
||||
msgstr "[ছবি]"
|
||||
|
@ -1 +1 @@
|
||||
Documentation.addTranslations({"locale": "pt_BR", "plural_expr": "(n > 1)", "messages": {"Search Results": "Resultados da Pesquisa", "Preparing search...": "Preparando pesquisa...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "Sua pesquisa n\u00e3o encontrou nenhum documento. Por favor assegure-se de que todas as palavras foram digitadas corretamente e de que voc\u00ea tenha selecionado o m\u00ednimo de categorias.", "Search finished, found %s page(s) matching the search query.": "Pesquisa finalizada, foram encontrada(s) %s p\u00e1gina(s) que conferem com o crit\u00e9rio de pesquisa.", ", in ": ", em ", "Expand sidebar": "", "Permalink to this headline": "Link permanente para este t\u00edtulo", "Searching": "Pesquisando", "Collapse sidebar": "", "Permalink to this definition": "Link permanente para esta defini\u00e7\u00e3o", "Hide Search Matches": "Esconder Resultados da Pesquisa"}});
|
||||
Documentation.addTranslations({"locale": "pt_BR", "plural_expr": "(n > 1)", "messages": {"Search Results": "Resultados da Pesquisa", "Preparing search...": "Preparando pesquisa...", "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.": "Sua pesquisa n\u00e3o encontrou nenhum documento. Por favor assegure-se de que todas as palavras foram digitadas corretamente e de que voc\u00ea tenha selecionado o m\u00ednimo de categorias.", "Search finished, found %s page(s) matching the search query.": "Pesquisa finalizada, foram encontrada(s) %s p\u00e1gina(s) que conferem com o crit\u00e9rio de pesquisa.", ", in ": ", em ", "Expand sidebar": "Expandir painel lateral", "Permalink to this headline": "Link permanente para este t\u00edtulo", "Searching": "Pesquisando", "Collapse sidebar": "Recolher painel lateral", "Permalink to this definition": "Link permanente para esta defini\u00e7\u00e3o", "Hide Search Matches": "Esconder Resultados da Pesquisa"}});
|
Binary file not shown.
@ -8,7 +8,7 @@ msgstr ""
|
||||
"Project-Id-Version: Sphinx 0.5\n"
|
||||
"Report-Msgid-Bugs-To: roger.demetrescu@gmail.com\n"
|
||||
"POT-Creation-Date: 2008-11-09 19:46+0100\n"
|
||||
"PO-Revision-Date: 2010-05-24 23:54+0200\n"
|
||||
"PO-Revision-Date: 2010-06-20 18:34-0300\n"
|
||||
"Last-Translator: Roger Demetrescu <roger.demetrescu@gmail.com>\n"
|
||||
"Language-Team: pt_BR <roger.demetrescu@gmail.com>\n"
|
||||
"Plural-Forms: nplurals=2; plural=(n > 1)\n"
|
||||
@ -17,7 +17,8 @@ msgstr ""
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Generated-By: Babel 0.9.4\n"
|
||||
|
||||
#: sphinx/environment.py:106 sphinx/writers/latex.py:184
|
||||
#: sphinx/environment.py:106
|
||||
#: sphinx/writers/latex.py:184
|
||||
#: sphinx/writers/manpage.py:67
|
||||
#, python-format
|
||||
msgid "%B %d, %Y"
|
||||
@ -41,7 +42,8 @@ msgstr "Módulo"
|
||||
msgid "%b %d, %Y"
|
||||
msgstr "%d/%m/%Y"
|
||||
|
||||
#: sphinx/builders/html.py:285 sphinx/themes/basic/defindex.html:30
|
||||
#: sphinx/builders/html.py:285
|
||||
#: sphinx/themes/basic/defindex.html:30
|
||||
msgid "General Index"
|
||||
msgstr "Índice Geral"
|
||||
|
||||
@ -70,9 +72,8 @@ msgid "Module author: "
|
||||
msgstr "Autor do módulo: "
|
||||
|
||||
#: sphinx/directives/other.py:131
|
||||
#, fuzzy
|
||||
msgid "Code author: "
|
||||
msgstr "Autor do módulo: "
|
||||
msgstr "Autor do código: "
|
||||
|
||||
#: sphinx/directives/other.py:133
|
||||
msgid "Author: "
|
||||
@ -85,18 +86,21 @@ msgstr "Veja também"
|
||||
#: sphinx/domains/__init__.py:253
|
||||
#, python-format
|
||||
msgid "%s %s"
|
||||
msgstr ""
|
||||
msgstr "%s %s"
|
||||
|
||||
#: sphinx/domains/c.py:51 sphinx/domains/python.py:49
|
||||
#: sphinx/domains/c.py:51
|
||||
#: sphinx/domains/python.py:49
|
||||
msgid "Parameters"
|
||||
msgstr "Parâmetros"
|
||||
|
||||
#: sphinx/domains/c.py:54 sphinx/domains/javascript.py:137
|
||||
#: sphinx/domains/c.py:54
|
||||
#: sphinx/domains/javascript.py:137
|
||||
#: sphinx/domains/python.py:59
|
||||
msgid "Returns"
|
||||
msgstr "Retorna"
|
||||
|
||||
#: sphinx/domains/c.py:56 sphinx/domains/python.py:61
|
||||
#: sphinx/domains/c.py:56
|
||||
#: sphinx/domains/python.py:61
|
||||
msgid "Return type"
|
||||
msgstr "Tipo de retorno"
|
||||
|
||||
@ -125,12 +129,15 @@ msgstr "%s (tipo C)"
|
||||
msgid "%s (C variable)"
|
||||
msgstr "%s (variável C)"
|
||||
|
||||
#: sphinx/domains/c.py:171 sphinx/domains/cpp.py:1031
|
||||
#: sphinx/domains/javascript.py:166 sphinx/domains/python.py:497
|
||||
#: sphinx/domains/c.py:171
|
||||
#: sphinx/domains/cpp.py:1031
|
||||
#: sphinx/domains/javascript.py:166
|
||||
#: sphinx/domains/python.py:497
|
||||
msgid "function"
|
||||
msgstr "função"
|
||||
|
||||
#: sphinx/domains/c.py:172 sphinx/domains/cpp.py:1032
|
||||
#: sphinx/domains/c.py:172
|
||||
#: sphinx/domains/cpp.py:1032
|
||||
msgid "member"
|
||||
msgstr "membro"
|
||||
|
||||
@ -138,14 +145,14 @@ msgstr "membro"
|
||||
msgid "macro"
|
||||
msgstr "macro"
|
||||
|
||||
#: sphinx/domains/c.py:174 sphinx/domains/cpp.py:1033
|
||||
#: sphinx/domains/c.py:174
|
||||
#: sphinx/domains/cpp.py:1033
|
||||
msgid "type"
|
||||
msgstr "tipo"
|
||||
|
||||
#: sphinx/domains/c.py:175
|
||||
#, fuzzy
|
||||
msgid "variable"
|
||||
msgstr "Variável"
|
||||
msgstr "variável"
|
||||
|
||||
#: sphinx/domains/cpp.py:876
|
||||
#, python-format
|
||||
@ -167,16 +174,19 @@ msgstr "%s (membro C++)"
|
||||
msgid "%s (C++ function)"
|
||||
msgstr "%s (função C++)"
|
||||
|
||||
#: sphinx/domains/cpp.py:1030 sphinx/domains/python.py:499
|
||||
#: sphinx/domains/cpp.py:1030
|
||||
#: sphinx/domains/python.py:499
|
||||
msgid "class"
|
||||
msgstr "classe"
|
||||
|
||||
#: sphinx/domains/javascript.py:117 sphinx/domains/python.py:221
|
||||
#: sphinx/domains/javascript.py:117
|
||||
#: sphinx/domains/python.py:221
|
||||
#, python-format
|
||||
msgid "%s() (built-in function)"
|
||||
msgstr "%s() (função interna)"
|
||||
|
||||
#: sphinx/domains/javascript.py:118 sphinx/domains/python.py:285
|
||||
#: sphinx/domains/javascript.py:118
|
||||
#: sphinx/domains/python.py:285
|
||||
#, python-format
|
||||
msgid "%s() (%s method)"
|
||||
msgstr "%s() (método %s)"
|
||||
@ -184,41 +194,44 @@ msgstr "%s() (método %s)"
|
||||
#: sphinx/domains/javascript.py:120
|
||||
#, python-format
|
||||
msgid "%s (global variable or constant)"
|
||||
msgstr ""
|
||||
msgstr "%s (variável global ou constante)"
|
||||
|
||||
#: sphinx/domains/javascript.py:122 sphinx/domains/python.py:323
|
||||
#: sphinx/domains/javascript.py:122
|
||||
#: sphinx/domains/python.py:323
|
||||
#, python-format
|
||||
msgid "%s (%s attribute)"
|
||||
msgstr "%s (atributo %s)"
|
||||
|
||||
#: sphinx/domains/javascript.py:131
|
||||
#, fuzzy
|
||||
msgid "Arguments"
|
||||
msgstr "Parâmetros"
|
||||
|
||||
#: sphinx/domains/javascript.py:134
|
||||
msgid "Throws"
|
||||
msgstr ""
|
||||
msgstr "Gera"
|
||||
|
||||
#: sphinx/domains/javascript.py:167 sphinx/domains/python.py:498
|
||||
#: sphinx/domains/javascript.py:167
|
||||
#: sphinx/domains/python.py:498
|
||||
msgid "data"
|
||||
msgstr ""
|
||||
msgstr "dado"
|
||||
|
||||
#: sphinx/domains/javascript.py:168 sphinx/domains/python.py:504
|
||||
#: sphinx/domains/javascript.py:168
|
||||
#: sphinx/domains/python.py:504
|
||||
msgid "attribute"
|
||||
msgstr "atributo"
|
||||
|
||||
#: sphinx/domains/python.py:53
|
||||
#, fuzzy
|
||||
msgid "Variables"
|
||||
msgstr "Variável"
|
||||
msgstr "Variáveis"
|
||||
|
||||
#: sphinx/domains/python.py:56
|
||||
msgid "Raises"
|
||||
msgstr "Levanta"
|
||||
|
||||
#: sphinx/domains/python.py:222 sphinx/domains/python.py:279
|
||||
#: sphinx/domains/python.py:291 sphinx/domains/python.py:304
|
||||
#: sphinx/domains/python.py:222
|
||||
#: sphinx/domains/python.py:279
|
||||
#: sphinx/domains/python.py:291
|
||||
#: sphinx/domains/python.py:304
|
||||
#, python-format
|
||||
msgid "%s() (in module %s)"
|
||||
msgstr "%s() (no módulo %s)"
|
||||
@ -228,7 +241,8 @@ msgstr "%s() (no módulo %s)"
|
||||
msgid "%s (built-in variable)"
|
||||
msgstr "%s (variável interna)"
|
||||
|
||||
#: sphinx/domains/python.py:226 sphinx/domains/python.py:317
|
||||
#: sphinx/domains/python.py:226
|
||||
#: sphinx/domains/python.py:317
|
||||
#, python-format
|
||||
msgid "%s (in module %s)"
|
||||
msgstr "%s (no módulo %s)"
|
||||
@ -259,14 +273,14 @@ msgid "%s() (%s static method)"
|
||||
msgstr "%s() (método estático %s)"
|
||||
|
||||
#: sphinx/domains/python.py:308
|
||||
#, fuzzy, python-format
|
||||
#, python-format
|
||||
msgid "%s() (%s.%s class method)"
|
||||
msgstr "%s() (método %s.%s)"
|
||||
msgstr "%s() (método de classe %s.%s)"
|
||||
|
||||
#: sphinx/domains/python.py:311
|
||||
#, fuzzy, python-format
|
||||
#, python-format
|
||||
msgid "%s() (%s class method)"
|
||||
msgstr "%s() (método %s)"
|
||||
msgstr "%s() (método de classe %s)"
|
||||
|
||||
#: sphinx/domains/python.py:321
|
||||
#, python-format
|
||||
@ -283,9 +297,8 @@ msgid "%s (module)"
|
||||
msgstr "%s (módulo)"
|
||||
|
||||
#: sphinx/domains/python.py:429
|
||||
#, fuzzy
|
||||
msgid "Python Module Index"
|
||||
msgstr "Índice do Módulo"
|
||||
msgstr "Índice de Módulos do Python"
|
||||
|
||||
#: sphinx/domains/python.py:430
|
||||
msgid "modules"
|
||||
@ -295,47 +308,49 @@ msgstr "módulos"
|
||||
msgid "Deprecated"
|
||||
msgstr "Obsoleto"
|
||||
|
||||
#: sphinx/domains/python.py:500 sphinx/locale/__init__.py:162
|
||||
#: sphinx/domains/python.py:500
|
||||
#: sphinx/locale/__init__.py:162
|
||||
msgid "exception"
|
||||
msgstr "exceção"
|
||||
|
||||
#: sphinx/domains/python.py:501
|
||||
msgid "method"
|
||||
msgstr ""
|
||||
msgstr "método"
|
||||
|
||||
#: sphinx/domains/python.py:502
|
||||
#, fuzzy, python-format
|
||||
#, python-format
|
||||
msgid "class method"
|
||||
msgstr "%s() (método %s)"
|
||||
msgstr "método de classe"
|
||||
|
||||
#: sphinx/domains/python.py:503
|
||||
msgid "static method"
|
||||
msgstr "método estático"
|
||||
|
||||
#: sphinx/domains/python.py:505 sphinx/locale/__init__.py:158
|
||||
#: sphinx/domains/python.py:505
|
||||
#: sphinx/locale/__init__.py:158
|
||||
msgid "module"
|
||||
msgstr "módulo"
|
||||
|
||||
#: sphinx/domains/rst.py:53
|
||||
#, python-format
|
||||
msgid "%s (directive)"
|
||||
msgstr ""
|
||||
msgstr "%s (diretiva)"
|
||||
|
||||
#: sphinx/domains/rst.py:55
|
||||
#, fuzzy, python-format
|
||||
#, python-format
|
||||
msgid "%s (role)"
|
||||
msgstr "%s (módulo)"
|
||||
msgstr "%s (papel)"
|
||||
|
||||
#: sphinx/domains/rst.py:103
|
||||
msgid "directive"
|
||||
msgstr ""
|
||||
msgstr "diretiva"
|
||||
|
||||
#: sphinx/domains/rst.py:104
|
||||
#, fuzzy
|
||||
msgid "role"
|
||||
msgstr "módulo"
|
||||
msgstr "papel"
|
||||
|
||||
#: sphinx/domains/std.py:68 sphinx/domains/std.py:84
|
||||
#: sphinx/domains/std.py:68
|
||||
#: sphinx/domains/std.py:84
|
||||
#, python-format
|
||||
msgid "environment variable; %s"
|
||||
msgstr "váriavel de ambiente; %s"
|
||||
@ -347,15 +362,15 @@ msgstr "%sopção de linha de comando; %s"
|
||||
|
||||
#: sphinx/domains/std.py:328
|
||||
msgid "glossary term"
|
||||
msgstr ""
|
||||
msgstr "Termo de glossário"
|
||||
|
||||
#: sphinx/domains/std.py:329
|
||||
msgid "grammar token"
|
||||
msgstr ""
|
||||
msgstr "token de gramática"
|
||||
|
||||
#: sphinx/domains/std.py:330
|
||||
msgid "reference label"
|
||||
msgstr ""
|
||||
msgstr "rótulo de referência"
|
||||
|
||||
#: sphinx/domains/std.py:331
|
||||
msgid "environment variable"
|
||||
@ -363,13 +378,16 @@ msgstr "váriavel de ambiente"
|
||||
|
||||
#: sphinx/domains/std.py:332
|
||||
msgid "program option"
|
||||
msgstr ""
|
||||
msgstr "opção de programa"
|
||||
|
||||
#: sphinx/domains/std.py:360 sphinx/themes/basic/genindex-single.html:11
|
||||
#: sphinx/domains/std.py:360
|
||||
#: sphinx/themes/basic/genindex-single.html:11
|
||||
#: sphinx/themes/basic/genindex-split.html:11
|
||||
#: sphinx/themes/basic/genindex-split.html:14
|
||||
#: sphinx/themes/basic/genindex.html:11 sphinx/themes/basic/genindex.html:14
|
||||
#: sphinx/themes/basic/genindex.html:50 sphinx/themes/basic/layout.html:125
|
||||
#: sphinx/themes/basic/genindex.html:11
|
||||
#: sphinx/themes/basic/genindex.html:14
|
||||
#: sphinx/themes/basic/genindex.html:50
|
||||
#: sphinx/themes/basic/layout.html:125
|
||||
#: sphinx/writers/latex.py:173
|
||||
msgid "Index"
|
||||
msgstr "Índice"
|
||||
@ -378,19 +396,20 @@ msgstr "Índice"
|
||||
msgid "Module Index"
|
||||
msgstr "Índice do Módulo"
|
||||
|
||||
#: sphinx/domains/std.py:362 sphinx/themes/basic/defindex.html:25
|
||||
#: sphinx/domains/std.py:362
|
||||
#: sphinx/themes/basic/defindex.html:25
|
||||
msgid "Search Page"
|
||||
msgstr "Página de Pesquisa"
|
||||
|
||||
#: sphinx/ext/autodoc.py:917
|
||||
#, python-format
|
||||
msgid " Bases: %s"
|
||||
msgstr ""
|
||||
msgstr " Bases: %s"
|
||||
|
||||
#: sphinx/ext/autodoc.py:950
|
||||
#, python-format
|
||||
msgid "alias of :class:`%s`"
|
||||
msgstr ""
|
||||
msgstr "apelido de :class:`%s`"
|
||||
|
||||
#: sphinx/ext/todo.py:41
|
||||
msgid "Todo"
|
||||
@ -407,29 +426,28 @@ msgstr "entrada original"
|
||||
|
||||
#: sphinx/ext/viewcode.py:66
|
||||
msgid "[source]"
|
||||
msgstr ""
|
||||
msgstr "[código fonte]"
|
||||
|
||||
#: sphinx/ext/viewcode.py:109
|
||||
msgid "[docs]"
|
||||
msgstr ""
|
||||
msgstr "[documentos]"
|
||||
|
||||
#: sphinx/ext/viewcode.py:123
|
||||
#, fuzzy
|
||||
msgid "Module code"
|
||||
msgstr "módulo"
|
||||
msgstr "Código do módulo"
|
||||
|
||||
#: sphinx/ext/viewcode.py:129
|
||||
#, python-format
|
||||
msgid "<h1>Source code for %s</h1>"
|
||||
msgstr ""
|
||||
msgstr "<h1>Código fonte de %s</h1>"
|
||||
|
||||
#: sphinx/ext/viewcode.py:156
|
||||
msgid "Overview: module code"
|
||||
msgstr ""
|
||||
msgstr "Visão geral: código do módulo"
|
||||
|
||||
#: sphinx/ext/viewcode.py:157
|
||||
msgid "<h1>All modules for which code is available</h1>"
|
||||
msgstr ""
|
||||
msgstr "<h1>Todos os módulos onde este código está disponível</h1>"
|
||||
|
||||
#: sphinx/locale/__init__.py:139
|
||||
msgid "Attention"
|
||||
@ -506,26 +524,31 @@ msgstr "comando"
|
||||
msgid "built-in function"
|
||||
msgstr "função interna"
|
||||
|
||||
#: sphinx/themes/agogo/layout.html:45 sphinx/themes/basic/globaltoc.html:10
|
||||
#: sphinx/themes/agogo/layout.html:45
|
||||
#: sphinx/themes/basic/globaltoc.html:10
|
||||
#: sphinx/themes/basic/localtoc.html:11
|
||||
msgid "Table Of Contents"
|
||||
msgstr "Tabela de Conteúdo"
|
||||
|
||||
#: sphinx/themes/agogo/layout.html:49 sphinx/themes/basic/layout.html:128
|
||||
#: sphinx/themes/basic/search.html:11 sphinx/themes/basic/search.html:14
|
||||
#: sphinx/themes/agogo/layout.html:49
|
||||
#: sphinx/themes/basic/layout.html:128
|
||||
#: sphinx/themes/basic/search.html:11
|
||||
#: sphinx/themes/basic/search.html:14
|
||||
msgid "Search"
|
||||
msgstr "Pesquisar"
|
||||
|
||||
#: sphinx/themes/agogo/layout.html:52 sphinx/themes/basic/searchbox.html:15
|
||||
#: sphinx/themes/agogo/layout.html:52
|
||||
#: sphinx/themes/basic/searchbox.html:15
|
||||
msgid "Go"
|
||||
msgstr "Ir"
|
||||
|
||||
#: sphinx/themes/agogo/layout.html:57 sphinx/themes/basic/searchbox.html:20
|
||||
#, fuzzy
|
||||
#: sphinx/themes/agogo/layout.html:57
|
||||
#: sphinx/themes/basic/searchbox.html:20
|
||||
msgid "Enter search terms or a module, class or function name."
|
||||
msgstr "Informe o nome de um módulo, classe ou função."
|
||||
msgstr "Digite os termos da busca ou o nome de um módulo, classe ou função."
|
||||
|
||||
#: sphinx/themes/agogo/layout.html:78 sphinx/themes/basic/sourcelink.html:14
|
||||
#: sphinx/themes/agogo/layout.html:78
|
||||
#: sphinx/themes/basic/sourcelink.html:14
|
||||
msgid "Show Source"
|
||||
msgstr "Exibir Fonte"
|
||||
|
||||
@ -615,12 +638,8 @@ msgstr "Última atualização em %(last_updated)s."
|
||||
|
||||
#: sphinx/themes/basic/layout.html:189
|
||||
#, python-format
|
||||
msgid ""
|
||||
"Created using <a href=\"http://sphinx.pocoo.org/\">Sphinx</a> "
|
||||
"%(sphinx_version)s."
|
||||
msgstr ""
|
||||
"Criado com <a href=\"http://sphinx.pocoo.org/\">Sphinx</a> "
|
||||
"%(sphinx_version)s."
|
||||
msgid "Created using <a href=\"http://sphinx.pocoo.org/\">Sphinx</a> %(sphinx_version)s."
|
||||
msgstr "Criado com <a href=\"http://sphinx.pocoo.org/\">Sphinx</a> %(sphinx_version)s."
|
||||
|
||||
#: sphinx/themes/basic/opensearch.xml:4
|
||||
#, python-format
|
||||
@ -647,10 +666,9 @@ msgstr "próximo capítulo"
|
||||
msgid ""
|
||||
"Please activate JavaScript to enable the search\n"
|
||||
" functionality."
|
||||
msgstr ""
|
||||
msgstr "Por favor ative o JavaScript para habilitar a funcionalidade de pesquisa."
|
||||
|
||||
#: sphinx/themes/basic/search.html:23
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
"From here you can search these documents. Enter your search\n"
|
||||
" words into the box below and click \"search\". Note that the search\n"
|
||||
@ -658,11 +676,9 @@ msgid ""
|
||||
" containing fewer words won't appear in the result list."
|
||||
msgstr ""
|
||||
"A partir daqui você pode pesquisar estes documentos. Preencha suas \n"
|
||||
" palavras de pesquisa na caixa abaixo e clique em \"pesquisar\". "
|
||||
"Observe que a função de pesquisa\n"
|
||||
" palavras de pesquisa na caixa abaixo e clique em \"pesquisar\". Observe que a função de pesquisa\n"
|
||||
" irá pesquisar automaticamente por todas as palavras.\n"
|
||||
" Páginas contendo menos palavras não irão aparecer na lista de "
|
||||
"resultado."
|
||||
" Páginas contendo menos palavras não irão aparecer na lista de resultado."
|
||||
|
||||
#: sphinx/themes/basic/search.html:30
|
||||
msgid "search"
|
||||
@ -713,12 +729,14 @@ msgstr "Alterações na API C"
|
||||
msgid "Other changes"
|
||||
msgstr "Outras alterações"
|
||||
|
||||
#: sphinx/themes/basic/static/doctools.js:154 sphinx/writers/html.py:482
|
||||
#: sphinx/themes/basic/static/doctools.js:154
|
||||
#: sphinx/writers/html.py:482
|
||||
#: sphinx/writers/html.py:487
|
||||
msgid "Permalink to this headline"
|
||||
msgstr "Link permanente para este título"
|
||||
|
||||
#: sphinx/themes/basic/static/doctools.js:160 sphinx/writers/html.py:87
|
||||
#: sphinx/themes/basic/static/doctools.js:160
|
||||
#: sphinx/writers/html.py:87
|
||||
msgid "Permalink to this definition"
|
||||
msgstr "Link permanente para esta definição"
|
||||
|
||||
@ -739,51 +757,45 @@ msgid ", in "
|
||||
msgstr ", em "
|
||||
|
||||
#: sphinx/themes/basic/static/searchtools.js:491
|
||||
msgid ""
|
||||
"Your search did not match any documents. Please make sure that all words "
|
||||
"are spelled correctly and that you've selected enough categories."
|
||||
msgstr ""
|
||||
"Sua pesquisa não encontrou nenhum documento. Por favor assegure-se de que"
|
||||
" todas as palavras foram digitadas corretamente e de que você tenha "
|
||||
"selecionado o mínimo de categorias."
|
||||
msgid "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
|
||||
msgstr "Sua pesquisa não encontrou nenhum documento. Por favor assegure-se de que todas as palavras foram digitadas corretamente e de que você tenha selecionado o mínimo de categorias."
|
||||
|
||||
#: sphinx/themes/basic/static/searchtools.js:493
|
||||
#, python-format
|
||||
msgid "Search finished, found %s page(s) matching the search query."
|
||||
msgstr ""
|
||||
"Pesquisa finalizada, foram encontrada(s) %s página(s) que conferem com o "
|
||||
"critério de pesquisa."
|
||||
msgstr "Pesquisa finalizada, foram encontrada(s) %s página(s) que conferem com o critério de pesquisa."
|
||||
|
||||
#: sphinx/themes/default/static/sidebar.js:66
|
||||
msgid "Expand sidebar"
|
||||
msgstr ""
|
||||
msgstr "Expandir painel lateral"
|
||||
|
||||
#: sphinx/themes/default/static/sidebar.js:79
|
||||
#: sphinx/themes/default/static/sidebar.js:106
|
||||
msgid "Collapse sidebar"
|
||||
msgstr ""
|
||||
msgstr "Recolher painel lateral"
|
||||
|
||||
#: sphinx/themes/haiku/layout.html:26
|
||||
msgid "Contents"
|
||||
msgstr ""
|
||||
msgstr "Conteúdo"
|
||||
|
||||
#: sphinx/writers/latex.py:171
|
||||
msgid "Release"
|
||||
msgstr "Versão"
|
||||
|
||||
#: sphinx/writers/latex.py:572 sphinx/writers/manpage.py:178
|
||||
#: sphinx/writers/latex.py:572
|
||||
#: sphinx/writers/manpage.py:178
|
||||
msgid "Footnotes"
|
||||
msgstr ""
|
||||
msgstr "Notas de rodapé"
|
||||
|
||||
#: sphinx/writers/latex.py:641
|
||||
msgid "continued from previous page"
|
||||
msgstr ""
|
||||
msgstr "continuação da página anterior"
|
||||
|
||||
#: sphinx/writers/latex.py:646
|
||||
#, fuzzy
|
||||
msgid "Continued on next page"
|
||||
msgstr "Índice completo em uma página"
|
||||
msgstr "Continua na próxima página"
|
||||
|
||||
#: sphinx/writers/text.py:422
|
||||
msgid "[image]"
|
||||
msgstr "[imagem]"
|
||||
|
||||
|
@ -18,6 +18,7 @@ from sphinx.errors import PycodeError
from sphinx.pycode import nodes
from sphinx.pycode.pgen2 import driver, token, tokenize, parse, literals
from sphinx.util import get_module_source
from sphinx.util.pycompat import next
from sphinx.util.docstrings import prepare_docstring, prepare_commentdoc


@ -98,6 +99,7 @@ class AttrDocVisitor(nodes.NodeVisitor):
if not pnode or pnode.type not in (token.INDENT, token.DEDENT):
break
prefix = pnode.get_prefix()
if not isinstance(prefix, unicode):
prefix = prefix.decode(self.encoding)
docstring = prepare_commentdoc(prefix)
self.add_docstring(node, docstring)
@ -278,7 +280,7 @@ class ModuleAnalyzer(object):
result[fullname] = (dtype, startline, endline)
expect_indent = False
if tok in ('def', 'class'):
name = tokeniter.next()[1]
name = next(tokeniter)[1]
namespace.append(name)
fullname = '.'.join(namespace)
stack.append((tok, fullname, spos[0], indent))
@ -29,6 +29,8 @@ class BaseNode(object):
return NotImplemented
return not self._eq(other)

__hash__ = None

def get_prev_sibling(self):
"""Return previous child in parent's children, or None."""
if self.parent is None:
@ -66,7 +66,7 @@ uni_escape_re = re.compile(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3}|"
def evalString(s, encoding=None):
regex = escape_re
repl = escape
if encoding:
if encoding and not isinstance(s, unicode):
s = s.decode(encoding)
if s.startswith('u') or s.startswith('U'):
regex = uni_escape_re
@ -143,7 +143,9 @@ class TokenError(Exception): pass

class StopTokenizing(Exception): pass

def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
def printtoken(type, token, scell, ecell, line): # for testing
srow, scol = scell
erow, ecol = ecell
print "%d,%d-%d,%d:\t%s\t%s" % \
(srow, scol, erow, ecol, tok_name[type], repr(token))
@ -9,8 +9,9 @@
:license: BSD, see LICENSE for details.
"""

import sys, os, time
import sys, os, time, re
from os import path
from codecs import open

TERM_ENCODING = getattr(sys.stdin, 'encoding', None)

@ -20,10 +21,23 @@ from sphinx.util.console import purple, bold, red, turquoise, \
nocolor, color_terminal
from sphinx.util import texescape

# function to get input from terminal -- overridden by the test suite
try:
# this raw_input is not converted by 2to3
term_input = raw_input
except NameError:
term_input = input


PROMPT_PREFIX = '> '

QUICKSTART_CONF = '''\
if sys.version_info >= (3, 0):
# prevents that the file is checked for being written in Python 2.x syntax
QUICKSTART_CONF = '#!/usr/bin/env python3\n'
else:
QUICKSTART_CONF = ''

QUICKSTART_CONF += '''\
# -*- coding: utf-8 -*-
#
# %(project)s documentation build configuration file, created by
@ -42,7 +56,7 @@ import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

@ -186,8 +200,8 @@ html_static_path = ['%(dot)sstatic']
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = '%(project_fn)sdoc'
@ -279,6 +293,9 @@ epub_copyright = u'%(copyright_str)s'

# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3

# Allow duplicate toc entries.
#epub_tocdup = True
'''

INTERSPHINX_CONFIG = '''
@ -667,9 +684,11 @@ def do_prompt(d, key, text, default=None, validator=nonempty):
prompt = purple(PROMPT_PREFIX + '%s [%s]: ' % (text, default))
else:
prompt = purple(PROMPT_PREFIX + text + ': ')
x = raw_input(prompt)
x = term_input(prompt)
if default and not x:
x = default
if not isinstance(x, unicode):
# for Python 2.x, try to get a Unicode string out of it
if x.decode('ascii', 'replace').encode('ascii', 'replace') != x:
if TERM_ENCODING:
x = x.decode(TERM_ENCODING)
@ -690,6 +709,18 @@ def do_prompt(d, key, text, default=None, validator=nonempty):
d[key] = x


if sys.version_info >= (3, 0):
# remove Unicode literal prefixes
_unicode_string_re = re.compile(r"[uU]('.*?')")
def _convert_python_source(source):
return _unicode_string_re.sub('\\1', source)

for f in ['QUICKSTART_CONF', 'EPUB_CONFIG', 'INTERSPHINX_CONFIG']:
globals()[f] = _convert_python_source(globals()[f])

del _unicode_string_re, _convert_python_source


def inner_main(args):
d = {}
texescape.init()
@ -845,28 +876,28 @@ directly.'''
if d['ext_intersphinx']:
conf_text += INTERSPHINX_CONFIG

f = open(path.join(srcdir, 'conf.py'), 'w')
f.write(conf_text.encode('utf-8'))
f = open(path.join(srcdir, 'conf.py'), 'w', encoding='utf-8')
f.write(conf_text)
f.close()

masterfile = path.join(srcdir, d['master'] + d['suffix'])
f = open(masterfile, 'w')
f.write((MASTER_FILE % d).encode('utf-8'))
f = open(masterfile, 'w', encoding='utf-8')
f.write(MASTER_FILE % d)
f.close()

if d['makefile']:
d['rsrcdir'] = d['sep'] and 'source' or '.'
d['rbuilddir'] = d['sep'] and 'build' or d['dot'] + 'build'
# use binary mode, to avoid writing \r\n on Windows
f = open(path.join(d['path'], 'Makefile'), 'wb')
f.write((MAKEFILE % d).encode('utf-8'))
f = open(path.join(d['path'], 'Makefile'), 'wb', encoding='utf-8')
f.write(MAKEFILE % d)
f.close()

if d['batchfile']:
d['rsrcdir'] = d['sep'] and 'source' or '.'
d['rbuilddir'] = d['sep'] and 'build' or d['dot'] + 'build'
f = open(path.join(d['path'], 'make.bat'), 'w')
f.write((BATCHFILE % d).encode('utf-8'))
f = open(path.join(d['path'], 'make.bat'), 'w', encoding='utf-8')
f.write(BATCHFILE % d)
f.close()

print
@ -105,9 +105,9 @@ class XRefRole(object):
classes = ['xref', domain, '%s-%s' % (domain, role)]
# if the first character is a bang, don't cross-reference at all
if text[0:1] == '!':
text = utils.unescape(text)
text = utils.unescape(text)[1:]
if self.fix_parens:
text, tgt = self._fix_parens(env, False, text[1:], "")
text, tgt = self._fix_parens(env, False, text, "")
innernode = self.innernodeclass(rawtext, text, classes=classes)
return self.result_nodes(inliner.document, env, innernode,
is_ref=False)
@ -173,6 +173,10 @@ def indexmarkup_role(typ, rawtext, etext, lineno, inliner,
indexnode['entries'] = [
('single', _('Python Enhancement Proposals!PEP %s') % text,
targetid, 'PEP %s' % text)]
anchor = ''
anchorindex = text.find('#')
if anchorindex > 0:
text, anchor = text[:anchorindex], text[anchorindex:]
try:
pepnum = int(text)
except ValueError:
@ -182,12 +186,17 @@ def indexmarkup_role(typ, rawtext, etext, lineno, inliner,
return [prb], [msg]
ref = inliner.document.settings.pep_base_url + 'pep-%04d' % pepnum
sn = nodes.strong('PEP '+text, 'PEP '+text)
rn = nodes.reference('', '', internal=False, refuri=ref, classes=[typ])
rn = nodes.reference('', '', internal=False, refuri=ref+anchor,
classes=[typ])
rn += sn
return [indexnode, targetnode, rn], []
elif typ == 'rfc':
indexnode['entries'] = [('single', 'RFC; RFC %s' % text,
targetid, 'RFC %s' % text)]
anchor = ''
anchorindex = text.find('#')
if anchorindex > 0:
text, anchor = text[:anchorindex], text[anchorindex:]
try:
rfcnum = int(text)
except ValueError:
@ -197,7 +206,8 @@ def indexmarkup_role(typ, rawtext, etext, lineno, inliner,
return [prb], [msg]
ref = inliner.document.settings.rfc_base_url + inliner.rfc_url % rfcnum
sn = nodes.strong('RFC '+text, 'RFC '+text)
rn = nodes.reference('', '', internal=False, refuri=ref, classes=[typ])
rn = nodes.reference('', '', internal=False, refuri=ref+anchor,
classes=[typ])
rn += sn
return [indexnode, targetnode, rn], []

@ -205,8 +215,9 @@ def indexmarkup_role(typ, rawtext, etext, lineno, inliner,
_amp_re = re.compile(r'(?<!&)&(?![&\s])')

def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
text = utils.unescape(text)
if typ == 'menuselection':
text = utils.unescape(text).replace('-->', u'\N{TRIANGULAR BULLET}')
text = text.replace('-->', u'\N{TRIANGULAR BULLET}')
spans = _amp_re.split(text)

node = nodes.emphasis(rawtext=rawtext)
@ -446,6 +446,7 @@
linkcolor=InnerLinkColor,filecolor=OuterLinkColor,
menucolor=OuterLinkColor,urlcolor=OuterLinkColor,
citecolor=InnerLinkColor]{hyperref}
\RequirePackage[figure,table]{hypcap}

% From docutils.writers.latex2e
\providecommand{\DUspan}[2]{%
@ -14,7 +14,7 @@
{%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %}
{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %}
{%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and
(not sidebars == []) %}
(sidebars != []) %}
{%- set url_root = pathto('', 1) %}
{%- if url_root == '#' %}{% set url_root = '' %}{% endif %}
@ -18,6 +18,7 @@ import tempfile
|
||||
import posixpath
|
||||
import traceback
|
||||
from os import path
|
||||
from codecs import open
|
||||
|
||||
import docutils
|
||||
from docutils.utils import relative_path
|
||||
@ -140,8 +141,8 @@ def copy_static_entry(source, targetdir, builder, context={},
|
||||
target = path.join(targetdir, path.basename(source))
|
||||
if source.lower().endswith('_t') and builder.templates:
|
||||
# templated!
|
||||
fsrc = open(source, 'rb')
|
||||
fdst = open(target[:-2], 'wb')
|
||||
fsrc = open(source, 'r', encoding='utf-8')
|
||||
fdst = open(target[:-2], 'w', encoding='utf-8')
|
||||
fdst.write(builder.templates.render_string(fsrc.read(), context))
|
||||
fsrc.close()
|
||||
fdst.close()
|
||||
@ -162,17 +163,23 @@ def copy_static_entry(source, targetdir, builder, context={},
|
||||
shutil.copytree(source, target)
|
||||
|
||||
|
||||
_DEBUG_HEADER = '''\
|
||||
# Sphinx version: %s
|
||||
# Docutils version: %s %s
|
||||
# Jinja2 version: %s
|
||||
'''
|
||||
|
||||
def save_traceback():
|
||||
"""
|
||||
Save the current exception's traceback in a temporary file.
|
||||
"""
|
||||
exc = traceback.format_exc()
|
||||
fd, path = tempfile.mkstemp('.log', 'sphinx-err-')
|
||||
os.write(fd, '# Sphinx version: %s\n' % sphinx.__version__)
|
||||
os.write(fd, '# Docutils version: %s %s\n' % (docutils.__version__,
|
||||
docutils.__version_details__))
|
||||
os.write(fd, '# Jinja2 version: %s\n' % jinja2.__version__)
|
||||
os.write(fd, exc)
|
||||
os.write(fd, (_DEBUG_HEADER %
|
||||
(sphinx.__version__,
|
||||
docutils.__version__, docutils.__version_details__,
|
||||
jinja2.__version__)).encode('utf-8'))
|
||||
os.write(fd, exc.encode('utf-8'))
|
||||
os.close(fd)
|
||||
return path
|
||||
|
||||
|
@ -141,9 +141,16 @@ class TypedField(GroupedField):
par = nodes.paragraph()
par += self.make_xref(self.rolename, domain, fieldarg, nodes.strong)
if fieldarg in types:
typename = u''.join(n.astext() for n in types[fieldarg])
par += nodes.Text(' (')
# NOTE: using .pop() here to prevent a single type node from being
# inserted twice into the doctree, which leads to
# inconsistencies later when references are resolved
fieldtype = types.pop(fieldarg)
if len(fieldtype) == 1 and isinstance(fieldtype[0], nodes.Text):
typename = u''.join(n.astext() for n in fieldtype)
par += self.make_xref(self.typerolename, domain, typename)
else:
par += fieldtype
par += nodes.Text(')')
par += nodes.Text(' -- ')
par += content
@ -160,7 +167,7 @@ class DocFieldTransformer(object):

def __init__(self, directive):
self.domain = directive.domain
if not hasattr(directive, '_doc_field_type_map'):
if '_doc_field_type_map' not in directive.__class__.__dict__:
directive.__class__._doc_field_type_map = \
self.preprocess_fieldtypes(directive.__class__.doc_field_types)
self.typemap = directive._doc_field_type_map
@ -222,7 +229,10 @@ class DocFieldTransformer(object):
if is_typefield:
# filter out only inline nodes; others will result in invalid
# markup being written out
content = filter(lambda n: isinstance(n, nodes.Inline), content)
content = filter(
lambda n: isinstance(n, nodes.Inline) or
isinstance(n, nodes.Text),
content)
if content:
types.setdefault(typename, {})[fieldarg] = content
continue
sphinx/util/jsonimpl.py | 48 (new file)
@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
"""
sphinx.util.jsonimpl
~~~~~~~~~~~~~~~~~~~~

JSON serializer implementation wrapper.

:copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

import UserString

try:
import json
# json-py's json module has no JSONEncoder; this will raise AttributeError
# if json-py is imported instead of the built-in json module
JSONEncoder = json.JSONEncoder
except (ImportError, AttributeError):
try:
import simplejson as json
JSONEncoder = json.JSONEncoder
except ImportError:
json = None
JSONEncoder = object


class SphinxJSONEncoder(JSONEncoder):
"""JSONEncoder subclass that forces translation proxies."""
def default(self, obj):
if isinstance(obj, UserString.UserString):
return unicode(obj)
return JSONEncoder.default(self, obj)


def dump(obj, fp, *args, **kwds):
kwds['cls'] = SphinxJSONEncoder
return json.dump(obj, fp, *args, **kwds)

def dumps(obj, *args, **kwds):
kwds['cls'] = SphinxJSONEncoder
return json.dumps(obj, *args, **kwds)

def load(*args, **kwds):
return json.load(*args, **kwds)

def loads(*args, **kwds):
return json.loads(*args, **kwds)
@ -10,11 +10,11 @@
|
||||
"""
|
||||
|
||||
import re
|
||||
import types
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.util.pycompat import class_types
|
||||
|
||||
|
||||
# \x00 means the "<" was backslash-escaped
|
||||
@ -129,7 +129,7 @@ def _new_traverse(self, condition=None,
|
||||
if include_self and descend and not siblings and not ascend:
|
||||
if condition is None:
|
||||
return self._all_traverse([])
|
||||
elif isinstance(condition, (types.ClassType, type)):
|
||||
elif isinstance(condition, class_types):
|
||||
return self._fast_traverse(condition, [])
|
||||
return self._old_traverse(condition, include_self,
|
||||
descend, siblings, ascend)
|
||||
|
@ -11,6 +11,7 @@
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import errno
|
||||
import shutil
|
||||
@ -124,7 +125,10 @@ no_fn_re = re.compile(r'[^a-zA-Z0-9_-]')
|
||||
def make_filename(string):
|
||||
return no_fn_re.sub('', string)
|
||||
|
||||
|
||||
if sys.version_info < (3, 0):
|
||||
def ustrftime(format, *args):
|
||||
# strftime for unicode strings
|
||||
return time.strftime(unicode(format).encode('utf-8'), *args).decode('utf-8')
|
||||
return time.strftime(unicode(format).encode('utf-8'), *args) \
|
||||
.decode('utf-8')
|
||||
else:
|
||||
ustrftime = time.strftime
|
||||
|
@ -12,6 +12,65 @@
import sys
import codecs
import encodings
import re

try:
from types import ClassType
class_types = (type, ClassType)
except ImportError:
# Python 3
class_types = (type,)


# the ubiquitous "bytes" helper function
if sys.version_info >= (3, 0):
def b(s):
return s.encode('utf-8')
else:
b = str


# Support for running 2to3 over config files

if sys.version_info < (3, 0):
# no need to refactor on 2.x versions
convert_with_2to3 = None
else:
def convert_with_2to3(filepath):
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
from lib2to3.pgen2.parse import ParseError
fixers = get_fixers_from_package('lib2to3.fixes')
refactoring_tool = RefactoringTool(fixers)
source = refactoring_tool._read_python_source(filepath)[0]
try:
tree = refactoring_tool.refactor_string(source, 'conf.py')
except ParseError, err:
# do not propagate lib2to3 exceptions
lineno, offset = err.context[1]
# try to match ParseError details with SyntaxError details
raise SyntaxError(err.msg, (filepath, lineno, offset, err.value))
return unicode(tree)


try:
base_exception = BaseException
except NameError:
base_exception = Exception


try:
next = next
except NameError:
# this is on Python 2, where the method is called "next" (it is refactored
# to __next__ by 2to3, but in that case never executed)
def next(iterator):
return iterator.next()


try:
bytes = bytes
except NameError:
bytes = str


try:
@ -83,6 +83,7 @@ def sphinx_smarty_pants(t):
|
||||
# Constants for quote education.
|
||||
|
||||
punct_class = r"""[!"#\$\%'()*+,-.\/:;<=>?\@\[\\\]\^_`{|}~]"""
|
||||
end_of_word_class = r"""[\s.,;:!?)]"""
|
||||
close_class = r"""[^\ \t\r\n\[\{\(\-]"""
|
||||
dec_dashes = r"""–|—"""
|
||||
|
||||
@ -117,8 +118,8 @@ opening_double_quotes_regex = re.compile(r"""
|
||||
closing_double_quotes_regex = re.compile(r"""
|
||||
#(%s)? # character that indicates the quote should be closing
|
||||
"
|
||||
(?=\s)
|
||||
""" % (close_class,), re.VERBOSE)
|
||||
(?=%s)
|
||||
""" % (close_class, end_of_word_class), re.VERBOSE)
|
||||
|
||||
closing_double_quotes_regex_2 = re.compile(r"""
|
||||
(%s) # character that indicates the quote should be closing
|
||||
|
@ -224,6 +224,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
else:
|
||||
self.top_sectionlevel = 1
|
||||
self.next_section_ids = set()
|
||||
self.next_figure_ids = set()
|
||||
self.next_table_ids = set()
|
||||
# flags
|
||||
self.verbatim = None
|
||||
self.in_title = 0
|
||||
@ -250,7 +252,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
'\\label{%s}' % self.idescape(id)
|
||||
|
||||
def hyperlink(self, id):
|
||||
return '\\hyperref[%s]{' % (self.idescape(id))
|
||||
return '{\\hyperref[%s]{' % (self.idescape(id))
|
||||
|
||||
def hyperpageref(self, id):
|
||||
return '\\autopageref*{%s}' % (self.idescape(id))
|
||||
@ -314,7 +316,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
# ... and all others are the appendices
|
||||
self.body.append(u'\n\\appendix\n')
|
||||
self.first_document = -1
|
||||
if node.has_key('docname'):
|
||||
if 'docname' in node:
|
||||
self.body.append(self.hypertarget(':doc'))
|
||||
# "- 1" because the level is increased before the title is visited
|
||||
self.sectionlevel = self.top_sectionlevel - 1
|
||||
@ -633,7 +635,10 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
self.body.append('{|' + ('L|' * self.table.colcount) + '}\n')
|
||||
if self.table.longtable and self.table.caption is not None:
|
||||
self.body.append(u'\\caption{%s} \\\\\n' % self.table.caption)
|
||||
|
||||
if self.table.caption is not None:
|
||||
for id in self.next_table_ids:
|
||||
self.body.append(self.hypertarget(id, anchor=False))
|
||||
self.next_table_ids.clear()
|
||||
if self.table.longtable:
|
||||
self.body.append('\\hline\n')
|
||||
self.body.append('\\endfirsthead\n\n')
|
||||
@ -694,7 +699,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
self.table.rowcount += 1
|
||||
|
||||
def visit_entry(self, node):
|
||||
if node.has_key('morerows') or node.has_key('morecols'):
|
||||
if 'morerows' in node or 'morecols' in node:
|
||||
raise UnsupportedError('%s:%s: column or row spanning cells are '
|
||||
'not yet implemented.' %
|
||||
(self.curfilestack[-1], node.line or ''))
|
||||
@ -751,7 +756,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
|
||||
def visit_term(self, node):
|
||||
ctx = '}] \\leavevmode'
|
||||
if node.has_key('ids') and node['ids']:
|
||||
if node.get('ids'):
|
||||
ctx += self.hypertarget(node['ids'][0])
|
||||
self.body.append('\\item[{')
|
||||
self.context.append(ctx)
|
||||
@ -833,20 +838,20 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
post = []
|
||||
include_graphics_options = []
|
||||
is_inline = self.is_inline(node)
|
||||
if attrs.has_key('scale'):
|
||||
if 'scale' in attrs:
|
||||
# Could also be done with ``scale`` option to
|
||||
# ``\includegraphics``; doing it this way for consistency.
|
||||
pre.append('\\scalebox{%f}{' % (attrs['scale'] / 100.0,))
|
||||
post.append('}')
|
||||
if attrs.has_key('width'):
|
||||
if 'width' in attrs:
|
||||
w = self.latex_image_length(attrs['width'])
|
||||
if w:
|
||||
include_graphics_options.append('width=%s' % w)
|
||||
if attrs.has_key('height'):
|
||||
if 'height' in attrs:
|
||||
h = self.latex_image_length(attrs['height'])
|
||||
if h:
|
||||
include_graphics_options.append('height=%s' % h)
|
||||
if attrs.has_key('align'):
|
||||
if 'align' in attrs:
|
||||
align_prepost = {
|
||||
# By default latex aligns the top of an image.
|
||||
(1, 'top'): ('', ''),
|
||||
@ -887,13 +892,17 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
pass
|
||||
|
||||
def visit_figure(self, node):
|
||||
if node.has_key('width') and node.get('align', '') in ('left', 'right'):
|
||||
ids = ''
|
||||
for id in self.next_figure_ids:
|
||||
ids += self.hypertarget(id, anchor=False)
|
||||
self.next_figure_ids.clear()
|
||||
if 'width' in node and node.get('align', '') in ('left', 'right'):
|
||||
self.body.append('\\begin{wrapfigure}{%s}{%s}\n\\centering' %
|
||||
(node['align'] == 'right' and 'r' or 'l',
|
||||
node['width']))
|
||||
self.context.append('\\end{wrapfigure}\n')
|
||||
self.context.append(ids + '\\end{wrapfigure}\n')
|
||||
else:
|
||||
if (not node.attributes.has_key('align') or
|
||||
if (not 'align' in node.attributes or
|
||||
node.attributes['align'] == 'center'):
|
||||
# centering does not add vertical space like center.
|
||||
align = '\n\\centering'
|
||||
@ -903,7 +912,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
align = '\\begin{flush%s}' % node.attributes['align']
|
||||
align_end = '\\end{flush%s}' % node.attributes['align']
|
||||
self.body.append('\\begin{figure}[htbp]%s\n' % align)
|
||||
self.context.append('%s\\end{figure}\n' % align_end)
|
||||
self.context.append(ids + align_end + '\\end{figure}\n')
|
||||
def depart_figure(self, node):
|
||||
self.body.append(self.context.pop())
|
||||
|
||||
@ -963,8 +972,11 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
def add_target(id):
|
||||
# indexing uses standard LaTeX index markup, so the targets
|
||||
# will be generated differently
|
||||
if not id.startswith('index-'):
|
||||
self.body.append(self.hypertarget(id))
|
||||
if id.startswith('index-'):
|
||||
return
|
||||
# do not generate \phantomsection in \section{}
|
||||
anchor = not self.in_title
|
||||
self.body.append(self.hypertarget(id, anchor=anchor))
|
||||
|
||||
# postpone the labels until after the sectioning command
|
||||
parindex = node.parent.index(node)
|
||||
@ -980,6 +992,20 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
self.next_section_ids.add(node['refid'])
|
||||
self.next_section_ids.update(node['ids'])
|
||||
return
|
||||
elif isinstance(next, nodes.figure):
|
||||
# labels for figures go in the figure body, not before
|
||||
if node.get('refid'):
|
||||
self.next_figure_ids.add(node['refid'])
|
||||
self.next_figure_ids.update(node['ids'])
|
||||
return
|
||||
elif isinstance(next, nodes.table):
|
||||
# same for tables, but only if they have a caption
|
||||
for n in node:
|
||||
if isinstance(n, nodes.title):
|
||||
if node.get('refid'):
|
||||
self.next_table_ids.add(node['refid'])
|
||||
self.next_table_ids.update(node['ids'])
|
||||
return
|
||||
except IndexError:
|
||||
pass
|
||||
if 'refuri' in node:
|
||||
@ -1048,9 +1074,9 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
id = self.curfilestack[-1] + ':' + uri[1:]
|
||||
self.body.append(self.hyperlink(id))
|
||||
if self.builder.config.latex_show_pagerefs:
|
||||
self.context.append('} (%s)' % self.hyperpageref(id))
|
||||
self.context.append('}} (%s)' % self.hyperpageref(id))
|
||||
else:
|
||||
self.context.append('}')
|
||||
self.context.append('}}')
|
||||
elif uri.startswith('%'):
|
||||
# references to documents or labels inside documents
|
||||
hashindex = uri.find('#')
|
||||
@ -1064,12 +1090,12 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
if len(node) and hasattr(node[0], 'attributes') and \
|
||||
'std-term' in node[0].get('classes', []):
|
||||
# don't add a pageref for glossary terms
|
||||
self.context.append('}')
|
||||
self.context.append('}}')
|
||||
else:
|
||||
if self.builder.config.latex_show_pagerefs:
|
||||
self.context.append('} (%s)' % self.hyperpageref(id))
|
||||
self.context.append('}} (%s)' % self.hyperpageref(id))
|
||||
else:
|
||||
self.context.append('}')
|
||||
self.context.append('}}')
|
||||
elif uri.startswith('@token'):
|
||||
if self.in_production_list:
|
||||
self.body.append('\\token{')
|
||||
@ -1151,7 +1177,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
self.no_contractions -= 1
|
||||
if self.in_title:
|
||||
self.body.append(r'\texttt{%s}' % content)
|
||||
elif node.has_key('role') and node['role'] == 'samp':
|
||||
elif node.get('role') == 'samp':
|
||||
self.body.append(r'\samp{%s}' % content)
|
||||
else:
|
||||
self.body.append(r'\code{%s}' % content)
|
||||
@ -1180,10 +1206,10 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
code = self.verbatim.rstrip('\n')
|
||||
lang = self.hlsettingstack[-1][0]
|
||||
linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1
|
||||
if node.has_key('language'):
|
||||
if 'language' in node:
|
||||
# code-block directives
|
||||
lang = node['language']
|
||||
if node.has_key('linenos'):
|
||||
if 'linenos' in node:
|
||||
linenos = node['linenos']
|
||||
hlcode = self.highlighter.highlight_block(code, lang, linenos)
|
||||
# workaround for Unicode issue
|
||||
|
@ -390,7 +390,7 @@ class TextTranslator(nodes.NodeVisitor):
|
||||
self.add_text(''.join(out) + '\n')
|
||||
|
||||
def writerow(row):
|
||||
lines = map(None, *row)
|
||||
lines = zip(*row)
|
||||
for line in lines:
|
||||
out = ['|']
|
||||
for i, cell in enumerate(line):
|
||||
|
@ -1425,6 +1425,10 @@ class XMLParser(object):
|
||||
err.position = value.lineno, value.offset
|
||||
raise err
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
def _fixtext(self, text):
|
||||
return text
|
||||
else:
|
||||
def _fixtext(self, text):
|
||||
# convert text string to ascii, if possible
|
||||
try:
|
||||
|
tests/path.py | 1067 (diff suppressed because it is too large)
@ -30,7 +30,7 @@ Special directives
|
||||
|
||||
.. testcode::
|
||||
|
||||
print 1+1
|
||||
print(1+1)
|
||||
|
||||
.. testoutput::
|
||||
|
||||
@ -50,30 +50,31 @@ Special directives
|
||||
|
||||
.. testsetup:: *
|
||||
|
||||
from math import floor
|
||||
def squared(x):
|
||||
return x * x
|
||||
|
||||
.. doctest::
|
||||
|
||||
>>> floor(1.2)
|
||||
1.0
|
||||
>>> squared(2)
|
||||
4
|
||||
|
||||
.. testcode::
|
||||
|
||||
print floor(1.2)
|
||||
print(squared(2))
|
||||
|
||||
.. testoutput::
|
||||
|
||||
1.0
|
||||
4
|
||||
|
||||
>>> floor(1.2)
|
||||
1.0
|
||||
>>> squared(2)
|
||||
4
|
||||
|
||||
* options for testcode/testoutput blocks
|
||||
|
||||
.. testcode::
|
||||
:hide:
|
||||
|
||||
print 'Output text.'
|
||||
print('Output text.')
|
||||
|
||||
.. testoutput::
|
||||
:hide:
|
||||
@ -85,36 +86,38 @@ Special directives
|
||||
|
||||
.. testsetup:: group1
|
||||
|
||||
from math import ceil
|
||||
def add(x, y):
|
||||
return x + y
|
||||
|
||||
``ceil`` is now known in "group1", but not in others.
|
||||
|
||||
``add`` is now known in "group1", but not in others.
|
||||
|
||||
.. doctest:: group1
|
||||
|
||||
>>> ceil(0.8)
|
||||
1.0
|
||||
>>> add(1, 1)
|
||||
2
|
||||
|
||||
.. doctest:: group2
|
||||
|
||||
>>> ceil(0.8)
|
||||
>>> add(1, 1)
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
NameError: name 'ceil' is not defined
|
||||
NameError: name 'add' is not defined
|
||||
|
||||
Interleaving testcode/testoutput:
|
||||
|
||||
.. testcode:: group1
|
||||
|
||||
print ceil(0.8)
|
||||
print(squared(3))
|
||||
|
||||
.. testcode:: group2
|
||||
|
||||
print floor(0.8)
|
||||
print(squared(4))
|
||||
|
||||
.. testoutput:: group1
|
||||
|
||||
1.0
|
||||
9
|
||||
|
||||
.. testoutput:: group2
|
||||
|
||||
0.0
|
||||
16
|
||||
|
@ -1,7 +1,7 @@
|
||||
# Literally included file using Python highlighting
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
foo = u"Including Unicode characters: üöä"
|
||||
foo = "Including Unicode characters: üöä"
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
@ -97,21 +97,23 @@ Inline markup
|
||||
|
||||
*Generic inline markup*
|
||||
|
||||
* :command:`command`
|
||||
* :dfn:`dfn`
|
||||
* :guilabel:`guilabel with &accelerator`
|
||||
* :kbd:`kbd`
|
||||
* :mailheader:`mailheader`
|
||||
* :makevar:`makevar`
|
||||
* :manpage:`manpage`
|
||||
* :mimetype:`mimetype`
|
||||
* :newsgroup:`newsgroup`
|
||||
* :program:`program`
|
||||
* :regexp:`regexp`
|
||||
* :menuselection:`File --> Close`
|
||||
Adding \n to test unescaping.
|
||||
|
||||
* :command:`command\\n`
|
||||
* :dfn:`dfn\\n`
|
||||
* :guilabel:`guilabel with &accelerator and \\n`
|
||||
* :kbd:`kbd\\n`
|
||||
* :mailheader:`mailheader\\n`
|
||||
* :makevar:`makevar\\n`
|
||||
* :manpage:`manpage\\n`
|
||||
* :mimetype:`mimetype\\n`
|
||||
* :newsgroup:`newsgroup\\n`
|
||||
* :program:`program\\n`
|
||||
* :regexp:`regexp\\n`
|
||||
* :menuselection:`File --> Close\\n`
|
||||
* :menuselection:`&File --> &Print`
|
||||
* :file:`a/{varpart}/b`
|
||||
* :samp:`print {i}`
|
||||
* :file:`a/{varpart}/b\\n`
|
||||
* :samp:`print {i}\\n`
|
||||
|
||||
*Linking inline markup*
|
||||
|
||||
|
@ -41,8 +41,17 @@ Testing object descriptions
|
||||
|
||||
.. function:: func_without_module2() -> annotation
|
||||
|
||||
.. object:: long(parameter, \
|
||||
list)
|
||||
another one
|
||||
|
||||
.. class:: TimeInt
|
||||
|
||||
:param moo: |test|
|
||||
:type moo: |test|
|
||||
|
||||
.. |test| replace:: Moo
|
||||
|
||||
.. class:: Time(hour, minute, isdst)
|
||||
|
||||
:param hour: The year.
|
||||
@ -57,6 +66,8 @@ Testing object descriptions
|
||||
:ivar int hour: like *hour*
|
||||
:ivar minute: like *minute*
|
||||
:vartype minute: int
|
||||
:param hour: Duplicate param. Should not lead to crashes.
|
||||
:type hour: Duplicate type.
|
||||
|
||||
|
||||
C items
|
||||
|
tests/run.py | 16
@ -11,7 +11,17 @@
"""

import sys
from os import path
from os import path, chdir, listdir

if sys.version_info >= (3, 0):
print('Copying and converting sources to build/lib/tests...')
from distutils.util import copydir_run_2to3
testroot = path.dirname(__file__) or '.'
newroot = path.join(testroot, path.pardir, 'build')
newroot = path.join(newroot, listdir(newroot)[0], 'tests')
copydir_run_2to3(testroot, newroot)
# switch to the converted dir so nose tests the right tests
chdir(newroot)

# always test the sphinx package from this directory
sys.path.insert(0, path.join(path.dirname(__file__), path.pardir))
@ -19,8 +29,8 @@ sys.path.insert(0, path.join(path.dirname(__file__), path.pardir))
try:
import nose
except ImportError:
print "The nose package is needed to run the Sphinx test suite."
print("The nose package is needed to run the Sphinx test suite.")
sys.exit(1)

print "Running Sphinx test suite..."
print("Running Sphinx test suite...")
nose.main()
@ -45,9 +45,11 @@ def test_output():
|
||||
app = TestApp(status=status, warning=warnings)
|
||||
try:
|
||||
status.truncate(0) # __init__ writes to status
|
||||
status.seek(0)
|
||||
app.info("Nothing here...")
|
||||
assert status.getvalue() == "Nothing here...\n"
|
||||
status.truncate(0)
|
||||
status.seek(0)
|
||||
app.info("Nothing here...", True)
|
||||
assert status.getvalue() == "Nothing here..."
|
||||
|
||||
|
@ -9,8 +9,6 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
import string
|
||||
|
||||
from util import *
|
||||
|
||||
from sphinx.ext.autosummary import mangle_signature
|
||||
@ -27,7 +25,7 @@ def test_mangle_signature():
|
||||
(a, b, c='foobar()', d=123) :: (a, b[, c, d])
|
||||
"""
|
||||
|
||||
TEST = [map(string.strip, x.split("::")) for x in TEST.split("\n")
|
||||
TEST = [map(lambda x: x.strip(), x.split("::")) for x in TEST.split("\n")
|
||||
if '::' in x]
|
||||
for inp, outp in TEST:
|
||||
res = mangle_signature(inp).strip().replace(u"\u00a0", " ")
|
||||
|
@ -21,6 +21,10 @@ def teardown_module():
|
||||
def test_pickle(app):
|
||||
app.builder.build_all()
|
||||
|
||||
@with_app(buildername='json')
|
||||
def test_json(app):
|
||||
app.builder.build_all()
|
||||
|
||||
@with_app(buildername='linkcheck')
|
||||
def test_linkcheck(app):
|
||||
app.builder.build_all()
|
||||
|
@ -75,7 +75,7 @@ def test_all(app):
|
||||
confoverrides={'language': 'xx', 'locale_dirs': ['.']})
|
||||
def test_patch(app):
|
||||
app.builder.build(['bom'])
|
||||
result = (app.outdir / 'bom.txt').text('utf-8')
|
||||
result = (app.outdir / 'bom.txt').text(encoding='utf-8')
|
||||
expect = (u"\nDatei mit UTF-8"
|
||||
u"\n***************\n" # underline matches new translation
|
||||
u"\nThis file has umlauts: äöü.\n")
|
||||
|
@ -11,8 +11,8 @@
|
||||
|
||||
import os
|
||||
import re
|
||||
import difflib
|
||||
import htmlentitydefs
|
||||
import sys
|
||||
from StringIO import StringIO
|
||||
|
||||
try:
|
||||
@ -37,8 +37,11 @@ ENV_WARNINGS = """\
|
||||
http://www.python.org/logo.png
|
||||
%(root)s/includes.txt:\\d*: \\(WARNING/2\\) Encoding 'utf-8-sig' used for \
|
||||
reading included file u'wrongenc.inc' seems to be wrong, try giving an \
|
||||
:encoding: option
|
||||
:encoding: option\\n?
|
||||
%(root)s/includes.txt:4: WARNING: download file not readable: nonexisting.png
|
||||
%(root)s/objects.txt:\\d*: WARNING: using old C markup; please migrate to \
|
||||
new-style markup \(e.g. c:function instead of cfunction\), see \
|
||||
http://sphinx.pocoo.org/domains.html
|
||||
"""
|
||||
|
||||
HTML_WARNINGS = ENV_WARNINGS + """\
|
||||
@ -48,183 +51,204 @@ HTML_WARNINGS = ENV_WARNINGS + """\
|
||||
%(root)s/markup.txt:: WARNING: invalid pair index entry u'keyword; '
|
||||
"""
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
ENV_WARNINGS = remove_unicode_literals(ENV_WARNINGS)
|
||||
HTML_WARNINGS = remove_unicode_literals(HTML_WARNINGS)
|
||||
|
||||
|
||||
def tail_check(check):
|
||||
rex = re.compile(check)
|
||||
def checker(nodes):
|
||||
for node in nodes:
|
||||
if node.tail and rex.search(node.tail):
|
||||
return True
|
||||
assert False, '%r not found in tail of any nodes %s' % (check, nodes)
|
||||
return checker
|
||||
|
||||
|
||||
HTML_XPATH = {
|
||||
'images.html': {
|
||||
".//img[@src='_images/img.png']": '',
|
||||
".//img[@src='_images/img1.png']": '',
|
||||
".//img[@src='_images/simg.png']": '',
|
||||
".//object[@data='_images/svgimg.svg']": '',
|
||||
".//embed[@src='_images/svgimg.svg']": '',
|
||||
},
|
||||
'subdir/images.html': {
|
||||
".//img[@src='../_images/img1.png']": '',
|
||||
".//img[@src='../_images/rimg.png']": '',
|
||||
},
|
||||
'subdir/includes.html': {
|
||||
".//a[@href='../_downloads/img.png']": '',
|
||||
".//img[@src='../_images/img.png']": '',
|
||||
".//p": 'This is an include file.',
|
||||
},
|
||||
'includes.html': {
|
||||
".//pre": u'Max Strauß',
|
||||
".//a[@href='_downloads/img.png']": '',
|
||||
".//a[@href='_downloads/img1.png']": '',
|
||||
".//pre": u'"quotes"',
|
||||
".//pre": u"'included'",
|
||||
},
|
||||
'autodoc.html': {
|
||||
".//dt[@id='test_autodoc.Class']": '',
|
||||
".//dt[@id='test_autodoc.function']/em": r'\*\*kwds',
|
||||
".//dd/p": r'Return spam\.',
|
||||
},
|
||||
'extapi.html': {
|
||||
".//strong": 'from function: Foo',
|
||||
".//strong": 'from class: Bar',
|
||||
},
|
||||
'markup.html': {
|
||||
".//title": 'set by title directive',
|
||||
".//p/em": 'Section author: Georg Brandl',
|
||||
".//p/em": 'Module author: Georg Brandl',
|
||||
'images.html': [
|
||||
(".//img[@src='_images/img.png']", ''),
|
||||
(".//img[@src='_images/img1.png']", ''),
|
||||
(".//img[@src='_images/simg.png']", ''),
|
||||
(".//object[@data='_images/svgimg.svg']", ''),
|
||||
(".//embed[@src='_images/svgimg.svg']", ''),
|
||||
],
|
||||
'subdir/images.html': [
|
||||
(".//img[@src='../_images/img1.png']", ''),
|
||||
(".//img[@src='../_images/rimg.png']", ''),
|
||||
],
|
||||
'subdir/includes.html': [
|
||||
(".//a[@href='../_downloads/img.png']", ''),
|
||||
(".//img[@src='../_images/img.png']", ''),
|
||||
(".//p", 'This is an include file.'),
|
||||
],
|
||||
'includes.html': [
|
||||
(".//pre", u'Max Strauß'),
|
||||
(".//a[@href='_downloads/img.png']", ''),
|
||||
(".//a[@href='_downloads/img1.png']", ''),
|
||||
(".//pre", u'"quotes"'),
|
||||
(".//pre", u"'included'"),
|
||||
],
|
||||
'autodoc.html': [
|
||||
(".//dt[@id='test_autodoc.Class']", ''),
|
||||
(".//dt[@id='test_autodoc.function']/em", r'\*\*kwds'),
|
||||
(".//dd/p", r'Return spam\.'),
|
||||
],
|
||||
'extapi.html': [
|
||||
(".//strong", 'from function: Foo'),
|
||||
(".//strong", 'from class: Bar'),
|
||||
],
|
||||
'markup.html': [
|
||||
(".//title", 'set by title directive'),
|
||||
(".//p/em", 'Section author: Georg Brandl'),
|
||||
(".//p/em", 'Module author: Georg Brandl'),
|
||||
# created by the meta directive
|
||||
".//meta[@name='author'][@content='Me']": '',
|
||||
".//meta[@name='keywords'][@content='docs, sphinx']": '',
|
||||
(".//meta[@name='author'][@content='Me']", ''),
|
||||
(".//meta[@name='keywords'][@content='docs, sphinx']", ''),
|
||||
# a label created by ``.. _label:``
|
||||
".//div[@id='label']": '',
|
||||
(".//div[@id='label']", ''),
|
||||
# code with standard code blocks
|
||||
".//pre": '^some code$',
|
||||
(".//pre", '^some code$'),
|
||||
# an option list
|
||||
".//span[@class='option']": '--help',
|
||||
(".//span[@class='option']", '--help'),
|
||||
# admonitions
|
||||
".//p[@class='first admonition-title']": 'My Admonition',
|
||||
".//p[@class='last']": 'Note text.',
|
||||
".//p[@class='last']": 'Warning text.',
|
||||
(".//p[@class='first admonition-title']", 'My Admonition'),
|
||||
(".//p[@class='last']", 'Note text.'),
|
||||
(".//p[@class='last']", 'Warning text.'),
|
||||
# inline markup
|
||||
".//li/strong": '^command$',
|
||||
".//li/strong": '^program$',
|
||||
".//li/em": '^dfn$',
|
||||
".//li/tt/span[@class='pre']": '^kbd$',
|
||||
".//li/em": u'File \N{TRIANGULAR BULLET} Close',
|
||||
".//li/tt/span[@class='pre']": '^a/$',
|
||||
".//li/tt/em/span[@class='pre']": '^varpart$',
|
||||
".//li/tt/em/span[@class='pre']": '^i$',
|
||||
".//a[@href='http://www.python.org/dev/peps/pep-0008']"
|
||||
"[@class='pep reference external']/strong": 'PEP 8',
|
||||
".//a[@href='http://tools.ietf.org/html/rfc1.html']"
|
||||
"[@class='rfc reference external']/strong": 'RFC 1',
|
||||
".//a[@href='objects.html#envvar-HOME']"
|
||||
"[@class='reference internal']/tt/span[@class='pre']": 'HOME',
|
||||
".//a[@href='#with']"
|
||||
"[@class='reference internal']/tt/span[@class='pre']": '^with$',
|
||||
".//a[@href='#grammar-token-try_stmt']"
|
||||
"[@class='reference internal']/tt/span": '^statement$',
|
||||
".//a[@href='subdir/includes.html']"
|
||||
"[@class='reference internal']/em": 'Including in subdir',
|
||||
".//a[@href='objects.html#cmdoption-python-c']"
|
||||
"[@class='reference internal']/em": 'Python -c option',
|
||||
(".//li/strong", r'^command\\n$'),
|
||||
(".//li/strong", r'^program\\n$'),
|
||||
(".//li/em", r'^dfn\\n$'),
|
||||
(".//li/tt/span[@class='pre']", r'^kbd\\n$'),
|
||||
(".//li/em", u'File \N{TRIANGULAR BULLET} Close'),
|
||||
(".//li/tt/span[@class='pre']", '^a/$'),
|
||||
(".//li/tt/em/span[@class='pre']", '^varpart$'),
|
||||
(".//li/tt/em/span[@class='pre']", '^i$'),
|
||||
(".//a[@href='http://www.python.org/dev/peps/pep-0008']"
|
||||
"[@class='pep reference external']/strong", 'PEP 8'),
|
||||
(".//a[@href='http://tools.ietf.org/html/rfc1.html']"
|
||||
"[@class='rfc reference external']/strong", 'RFC 1'),
|
||||
(".//a[@href='objects.html#envvar-HOME']"
|
||||
"[@class='reference internal']/tt/span[@class='pre']", 'HOME'),
|
||||
(".//a[@href='#with']"
|
||||
"[@class='reference internal']/tt/span[@class='pre']", '^with$'),
|
||||
(".//a[@href='#grammar-token-try_stmt']"
|
||||
"[@class='reference internal']/tt/span", '^statement$'),
|
||||
(".//a[@href='subdir/includes.html']"
|
||||
"[@class='reference internal']/em", 'Including in subdir'),
|
||||
(".//a[@href='objects.html#cmdoption-python-c']"
|
||||
"[@class='reference internal']/em", 'Python -c option'),
|
||||
# abbreviations
|
||||
".//abbr[@title='abbreviation']": '^abbr$',
|
||||
(".//abbr[@title='abbreviation']", '^abbr$'),
|
||||
# version stuff
|
||||
".//span[@class='versionmodified']": 'New in version 0.6',
|
||||
(".//span[@class='versionmodified']", 'New in version 0.6'),
|
||||
# footnote reference
|
||||
".//a[@class='footnote-reference']": r'\[1\]',
|
||||
(".//a[@class='footnote-reference']", r'\[1\]'),
|
||||
# created by reference lookup
|
||||
".//a[@href='contents.html#ref1']": '',
|
||||
(".//a[@href='contents.html#ref1']", ''),
|
||||
# ``seealso`` directive
|
||||
".//div/p[@class='first admonition-title']": 'See also',
|
||||
(".//div/p[@class='first admonition-title']", 'See also'),
|
||||
# a ``hlist`` directive
|
||||
".//table[@class='hlist']/tr/td/ul/li": '^This$',
|
||||
(".//table[@class='hlist']/tr/td/ul/li", '^This$'),
|
||||
# a ``centered`` directive
|
||||
".//p[@class='centered']/strong": 'LICENSE',
|
||||
(".//p[@class='centered']/strong", 'LICENSE'),
|
||||
# a glossary
|
||||
".//dl/dt[@id='term-boson']": 'boson',
|
||||
(".//dl/dt[@id='term-boson']", 'boson'),
|
||||
# a production list
|
||||
".//pre/strong": 'try_stmt',
|
||||
".//pre/a[@href='#grammar-token-try1_stmt']/tt/span": 'try1_stmt',
|
||||
(".//pre/strong", 'try_stmt'),
|
||||
(".//pre/a[@href='#grammar-token-try1_stmt']/tt/span", 'try1_stmt'),
|
||||
# tests for ``only`` directive
|
||||
".//p": 'A global substitution.',
|
||||
".//p": 'In HTML.',
|
||||
".//p": 'In both.',
|
||||
".//p": 'Always present',
|
||||
},
|
||||
'objects.html': {
|
||||
".//dt[@id='mod.Cls.meth1']": '',
|
||||
".//dt[@id='errmod.Error']": '',
|
||||
".//a[@href='#mod.Cls'][@class='reference internal']": '',
|
||||
".//dl[@class='userdesc']": '',
|
||||
".//dt[@id='userdesc-myobj']": '',
|
||||
".//a[@href='#userdesc-myobj']": '',
|
||||
(".//p", 'A global substitution.'),
|
||||
(".//p", 'In HTML.'),
|
||||
(".//p", 'In both.'),
|
||||
(".//p", 'Always present'),
|
||||
],
|
||||
'objects.html': [
|
||||
(".//dt[@id='mod.Cls.meth1']", ''),
|
||||
(".//dt[@id='errmod.Error']", ''),
|
||||
(".//dt/tt", r'long\(parameter,\s* list\)'),
|
||||
(".//dt/tt", 'another one'),
|
||||
(".//a[@href='#mod.Cls'][@class='reference internal']", ''),
|
||||
(".//dl[@class='userdesc']", ''),
|
||||
(".//dt[@id='userdesc-myobj']", ''),
|
||||
(".//a[@href='#userdesc-myobj'][@class='reference internal']", ''),
|
||||
# C references
|
||||
".//span[@class='pre']": 'CFunction()',
|
||||
".//a[@href='#Sphinx_DoSomething']": '',
|
||||
".//a[@href='#SphinxStruct.member']": '',
|
||||
".//a[@href='#SPHINX_USE_PYTHON']": '',
|
||||
".//a[@href='#SphinxType']": '',
|
||||
".//a[@href='#sphinx_global']": '',
|
||||
(".//span[@class='pre']", 'CFunction()'),
|
||||
(".//a[@href='#Sphinx_DoSomething']", ''),
|
||||
(".//a[@href='#SphinxStruct.member']", ''),
|
||||
(".//a[@href='#SPHINX_USE_PYTHON']", ''),
|
||||
(".//a[@href='#SphinxType']", ''),
|
||||
(".//a[@href='#sphinx_global']", ''),
|
||||
# reference from old C markup extension
|
||||
".//a[@href='#Sphinx_Func']": '',
|
||||
(".//a[@href='#Sphinx_Func']", ''),
|
||||
# test global TOC created by toctree()
|
||||
".//ul[@class='current']/li[@class='toctree-l1 current']/a[@href='']":
|
||||
'Testing object descriptions',
|
||||
".//li[@class='toctree-l1']/a[@href='markup.html']":
|
||||
'Testing various markup',
|
||||
(".//ul[@class='current']/li[@class='toctree-l1 current']/a[@href='']",
|
||||
'Testing object descriptions'),
|
||||
(".//li[@class='toctree-l1']/a[@href='markup.html']",
|
||||
'Testing various markup'),
|
||||
# custom sidebar
|
||||
".//h4": 'Custom sidebar',
|
||||
},
|
||||
'contents.html': {
|
||||
".//meta[@name='hc'][@content='hcval']": '',
|
||||
".//meta[@name='hc_co'][@content='hcval_co']": '',
|
||||
".//meta[@name='testopt'][@content='testoverride']": '',
|
||||
".//td[@class='label']": r'\[Ref1\]',
|
||||
".//td[@class='label']": '',
|
||||
".//li[@class='toctree-l1']/a": 'Testing various markup',
|
||||
".//li[@class='toctree-l2']/a": 'Inline markup',
|
||||
".//title": 'Sphinx <Tests>',
|
||||
".//div[@class='footer']": 'Georg Brandl & Team',
|
||||
".//a[@href='http://python.org/']"
|
||||
"[@class='reference external']": '',
|
||||
".//li/a[@href='genindex.html']/em": 'Index',
|
||||
".//li/a[@href='py-modindex.html']/em": 'Module Index',
|
||||
".//li/a[@href='search.html']/em": 'Search Page',
|
||||
(".//h4", 'Custom sidebar'),
|
||||
# docfields
|
||||
(".//td[@class='field-body']/ul/li/strong", '^moo$'),
|
||||
(".//td[@class='field-body']/ul/li/strong",
|
||||
tail_check(r'\(Moo\) .* Moo')),
|
||||
],
|
||||
'contents.html': [
|
||||
(".//meta[@name='hc'][@content='hcval']", ''),
|
||||
(".//meta[@name='hc_co'][@content='hcval_co']", ''),
|
||||
(".//meta[@name='testopt'][@content='testoverride']", ''),
|
||||
(".//td[@class='label']", r'\[Ref1\]'),
|
||||
(".//td[@class='label']", ''),
|
||||
(".//li[@class='toctree-l1']/a", 'Testing various markup'),
|
||||
(".//li[@class='toctree-l2']/a", 'Inline markup'),
|
||||
(".//title", 'Sphinx <Tests>'),
|
||||
(".//div[@class='footer']", 'Georg Brandl & Team'),
|
||||
(".//a[@href='http://python.org/']"
|
||||
"[@class='reference external']", ''),
|
||||
(".//li/a[@href='genindex.html']/em", 'Index'),
|
||||
(".//li/a[@href='py-modindex.html']/em", 'Module Index'),
|
||||
(".//li/a[@href='search.html']/em", 'Search Page'),
|
||||
# custom sidebar only for contents
|
||||
".//h4": 'Contents sidebar',
|
||||
},
|
||||
'bom.html': {
|
||||
".//title": " File with UTF-8 BOM",
|
||||
},
|
||||
'extensions.html': {
|
||||
".//a[@href='http://python.org/dev/']": "http://python.org/dev/",
|
||||
".//a[@href='http://bugs.python.org/issue1000']": "issue 1000",
|
||||
".//a[@href='http://bugs.python.org/issue1042']": "explicit caption",
|
||||
},
|
||||
'_static/statictmpl.html': {
|
||||
".//project": 'Sphinx <Tests>',
|
||||
},
|
||||
(".//h4", 'Contents sidebar'),
|
||||
],
|
||||
'bom.html': [
|
||||
(".//title", " File with UTF-8 BOM"),
|
||||
],
|
||||
'extensions.html': [
|
||||
(".//a[@href='http://python.org/dev/']", "http://python.org/dev/"),
|
||||
(".//a[@href='http://bugs.python.org/issue1000']", "issue 1000"),
|
||||
(".//a[@href='http://bugs.python.org/issue1042']", "explicit caption"),
|
||||
],
|
||||
'_static/statictmpl.html': [
|
||||
(".//project", 'Sphinx <Tests>'),
|
||||
],
|
||||
}
|
||||
|
||||
if pygments:
|
||||
HTML_XPATH['includes.html'].update({
|
||||
".//pre/span[@class='s']": u'üöä',
|
||||
".//div[@class='inc-pyobj1 highlight-text']//pre":
|
||||
r'^class Foo:\n pass\n\s*$',
|
||||
".//div[@class='inc-pyobj2 highlight-text']//pre":
|
||||
r'^ def baz\(\):\n pass\n\s*$',
|
||||
".//div[@class='inc-lines highlight-text']//pre":
|
||||
r'^class Foo:\n pass\nclass Bar:\n$',
|
||||
".//div[@class='inc-startend highlight-text']//pre":
|
||||
ur'^foo = u"Including Unicode characters: üöä"\n$',
|
||||
".//div[@class='inc-preappend highlight-text']//pre":
|
||||
r'(?m)^START CODE$',
|
||||
".//div[@class='inc-pyobj-dedent highlight-python']//span":
|
||||
r'def',
|
||||
".//div[@class='inc-tab3 highlight-text']//pre":
|
||||
r'-| |-',
|
||||
".//div[@class='inc-tab8 highlight-python']//pre":
|
||||
r'-| |-',
|
||||
})
|
||||
HTML_XPATH['subdir/includes.html'].update({
|
||||
".//pre/span": 'line 1',
|
||||
".//pre/span": 'line 2',
|
||||
})
|
||||
HTML_XPATH['includes.html'].extend([
|
||||
(".//pre/span[@class='s']", u'üöä'),
|
||||
(".//div[@class='inc-pyobj1 highlight-text']//pre",
|
||||
r'^class Foo:\n pass\n\s*$'),
|
||||
(".//div[@class='inc-pyobj2 highlight-text']//pre",
|
||||
r'^ def baz\(\):\n pass\n\s*$'),
|
||||
(".//div[@class='inc-lines highlight-text']//pre",
|
||||
r'^class Foo:\n pass\nclass Bar:\n$'),
|
||||
(".//div[@class='inc-startend highlight-text']//pre",
|
||||
ur'^foo = "Including Unicode characters: üöä"\n$'),
|
||||
(".//div[@class='inc-preappend highlight-text']//pre",
|
||||
r'(?m)^START CODE$'),
|
||||
(".//div[@class='inc-pyobj-dedent highlight-python']//span",
|
||||
r'def'),
|
||||
(".//div[@class='inc-tab3 highlight-text']//pre",
|
||||
r'-| |-'),
|
||||
(".//div[@class='inc-tab8 highlight-python']//pre",
|
||||
r'-| |-'),
|
||||
])
|
||||
HTML_XPATH['subdir/includes.html'].extend([
|
||||
(".//pre/span", 'line 1'),
|
||||
(".//pre/span", 'line 2'),
|
||||
])
|
||||
|
||||
class NslessParser(ET.XMLParser):
|
||||
"""XMLParser that throws away namespaces in tag names."""
|
||||
@ -282,14 +306,14 @@ def test_html(app):
|
||||
html_warnings_exp = HTML_WARNINGS % {'root': re.escape(app.srcdir)}
|
||||
assert re.match(html_warnings_exp + '$', html_warnings), \
|
||||
'Warnings don\'t match:\n' + \
|
||||
'\n'.join(difflib.ndiff(html_warnings_exp.splitlines(),
|
||||
html_warnings.splitlines()))
|
||||
'--- Expected (regex):\n' + html_warnings_exp + \
|
||||
'--- Got:\n' + html_warnings
|
||||
|
||||
for fname, paths in HTML_XPATH.iteritems():
|
||||
parser = NslessParser()
|
||||
parser.entity.update(htmlentitydefs.entitydefs)
|
||||
etree = ET.parse(os.path.join(app.outdir, fname), parser)
|
||||
for path, check in paths.iteritems():
|
||||
for path, check in paths:
|
||||
yield check_xpath, etree, fname, path, check
|
||||
|
||||
check_static_entries(app.builder.outdir)
|
||||
|
@ -12,7 +12,6 @@
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import difflib
|
||||
from StringIO import StringIO
|
||||
from subprocess import Popen, PIPE
|
||||
|
||||
@ -33,6 +32,9 @@ None:None: WARNING: no matching candidate for image URI u'foo.\\*'
|
||||
WARNING: invalid pair index entry u''
|
||||
"""
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
LATEX_WARNINGS = remove_unicode_literals(LATEX_WARNINGS)
|
||||
|
||||
|
||||
@with_app(buildername='latex', warning=latex_warnfile, cleanenv=True)
|
||||
def test_latex(app):
|
||||
@ -42,8 +44,9 @@ def test_latex(app):
|
||||
latex_warnings_exp = LATEX_WARNINGS % {'root': app.srcdir}
|
||||
assert re.match(latex_warnings_exp + '$', latex_warnings), \
|
||||
'Warnings don\'t match:\n' + \
|
||||
'\n'.join(difflib.ndiff(latex_warnings_exp.splitlines(),
|
||||
latex_warnings.splitlines()))
|
||||
'--- Expected (regex):\n' + latex_warnings_exp + \
|
||||
'--- Got:\n' + latex_warnings
|
||||
|
||||
# file from latex_additional_files
|
||||
assert (app.outdir / 'svgimg.svg').isfile()
|
||||
|
||||
|
@ -9,6 +9,7 @@
|
||||
:copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
import sys
|
||||
|
||||
from util import *
|
||||
|
||||
@ -84,11 +85,23 @@ def test_extension_values(app):
|
||||
@with_tempdir
|
||||
def test_errors_warnings(dir):
|
||||
# test the error for syntax errors in the config file
|
||||
write_file(dir / 'conf.py', 'project = \n')
|
||||
write_file(dir / 'conf.py', u'project = \n', 'ascii')
|
||||
raises_msg(ConfigError, 'conf.py', Config, dir, 'conf.py', {}, None)
|
||||
|
||||
# test the automatic conversion of 2.x only code in configs
|
||||
write_file(dir / 'conf.py', u'# -*- coding: utf-8\n\n'
|
||||
u'project = u"Jägermeister"\n', 'utf-8')
|
||||
cfg = Config(dir, 'conf.py', {}, None)
|
||||
cfg.init_values()
|
||||
assert cfg.project == u'Jägermeister'
|
||||
|
||||
# test the warning for bytestrings with non-ascii content
|
||||
write_file(dir / 'conf.py', '# -*- coding: latin-1\nproject = "foo\xe4"\n')
|
||||
# bytestrings with non-ascii content are a syntax error in python3 so we
|
||||
# skip the test there
|
||||
if sys.version_info >= (3, 0):
|
||||
return
|
||||
write_file(dir / 'conf.py', u'# -*- coding: latin-1\nproject = "fooä"\n',
|
||||
'latin-1')
|
||||
cfg = Config(dir, 'conf.py', {}, None)
|
||||
warned = [False]
|
||||
def warn(msg):
|
||||
|
@ -33,7 +33,7 @@ def test_build(app):
|
||||
assert 'api.h' in c_undoc
|
||||
assert ' * Py_SphinxTest' in c_undoc
|
||||
|
||||
undoc_py, undoc_c = pickle.loads((app.outdir / 'undoc.pickle').text())
|
||||
undoc_py, undoc_c = pickle.loads((app.outdir / 'undoc.pickle').bytes())
|
||||
assert len(undoc_c) == 1
|
||||
# the key is the full path to the header file, which isn't testable
|
||||
assert undoc_c.values()[0] == [('function', 'Py_SphinxTest')]
|
||||
|
@ -8,6 +8,7 @@
|
||||
:copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
import sys
|
||||
|
||||
from util import *
|
||||
|
||||
@ -54,8 +55,10 @@ def test_images():
|
||||
app._warning.reset()
|
||||
htmlbuilder = StandaloneHTMLBuilder(app)
|
||||
htmlbuilder.post_process_images(tree)
|
||||
assert "no matching candidate for image URI u'foo.*'" in \
|
||||
app._warning.content[-1]
|
||||
image_uri_message = "no matching candidate for image URI u'foo.*'"
|
||||
if sys.version_info >= (3, 0):
|
||||
image_uri_message = remove_unicode_literals(image_uri_message)
|
||||
assert image_uri_message in app._warning.content[-1]
|
||||
assert set(htmlbuilder.images.keys()) == \
|
||||
set(['subdir/img.png', 'img.png', 'subdir/simg.png', 'svgimg.svg'])
|
||||
assert set(htmlbuilder.images.values()) == \
|
||||
@ -64,8 +67,7 @@ def test_images():
|
||||
app._warning.reset()
|
||||
latexbuilder = LaTeXBuilder(app)
|
||||
latexbuilder.post_process_images(tree)
|
||||
assert "no matching candidate for image URI u'foo.*'" in \
|
||||
app._warning.content[-1]
|
||||
assert image_uri_message in app._warning.content[-1]
|
||||
assert set(latexbuilder.images.keys()) == \
|
||||
set(['subdir/img.png', 'subdir/simg.png', 'img.png', 'img.pdf',
|
||||
'svgimg.pdf'])
|
||||
|
@ -11,7 +11,10 @@
|
||||
|
||||
import zlib
|
||||
import posixpath
|
||||
from cStringIO import StringIO
|
||||
try:
|
||||
from io import BytesIO
|
||||
except ImportError:
|
||||
from cStringIO import StringIO as BytesIO
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
@ -28,23 +31,23 @@ inventory_v1 = '''\
|
||||
# Version: 1.0
|
||||
module mod foo.html
|
||||
module.cls class foo.html
|
||||
'''
|
||||
'''.encode('utf-8')
|
||||
|
||||
inventory_v2 = '''\
|
||||
# Sphinx inventory version 2
|
||||
# Project: foo
|
||||
# Version: 2.0
|
||||
# The remainder of this file is compressed with zlib.
|
||||
''' + zlib.compress('''\
|
||||
'''.encode('utf-8') + zlib.compress('''\
|
||||
module1 py:module 0 foo.html#module-module1 Long Module desc
|
||||
module2 py:module 0 foo.html#module-$ -
|
||||
module1.func py:function 1 sub/foo.html#$ -
|
||||
CFunc c:function 2 cfunc.html#CFunc -
|
||||
''')
|
||||
'''.encode('utf-8'))
|
||||
|
||||
|
||||
def test_read_inventory_v1():
|
||||
f = StringIO(inventory_v1)
|
||||
f = BytesIO(inventory_v1)
|
||||
f.readline()
|
||||
invdata = read_inventory_v1(f, '/util', posixpath.join)
|
||||
assert invdata['py:module']['module'] == \
|
||||
@ -54,12 +57,12 @@ def test_read_inventory_v1():
|
||||
|
||||
|
||||
def test_read_inventory_v2():
|
||||
f = StringIO(inventory_v2)
|
||||
f = BytesIO(inventory_v2)
|
||||
f.readline()
|
||||
invdata1 = read_inventory_v2(f, '/util', posixpath.join)
|
||||
|
||||
# try again with a small buffer size to test the chunking algorithm
|
||||
f = StringIO(inventory_v2)
|
||||
f = BytesIO(inventory_v2)
|
||||
f.readline()
|
||||
invdata2 = read_inventory_v2(f, '/util', posixpath.join, bufsize=5)
|
||||
|
||||
@ -80,7 +83,10 @@ def test_read_inventory_v2():
|
||||
def test_missing_reference(tempdir, app):
|
||||
inv_file = tempdir / 'inventory'
|
||||
write_file(inv_file, inventory_v2)
|
||||
app.config.intersphinx_mapping = {'http://docs.python.org/': inv_file}
|
||||
app.config.intersphinx_mapping = {
|
||||
'http://docs.python.org/': inv_file,
|
||||
'py3k': ('http://docs.python.org/py3k/', inv_file),
|
||||
}
|
||||
app.config.intersphinx_cache_limit = 0
|
||||
|
||||
# load the inventory and check if it's done correctly
|
||||
@ -91,22 +97,58 @@ def test_missing_reference(tempdir, app):
|
||||
('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-')
|
||||
|
||||
# create fake nodes and check referencing
|
||||
contnode = nodes.emphasis('foo')
|
||||
refnode = addnodes.pending_xref('')
|
||||
refnode['reftarget'] = 'module1.func'
|
||||
refnode['reftype'] = 'func'
|
||||
refnode['refdomain'] = 'py'
|
||||
|
||||
rn = missing_reference(app, app.env, refnode, contnode)
|
||||
def fake_node(domain, type, target, content, **attrs):
|
||||
contnode = nodes.emphasis(content, content)
|
||||
node = addnodes.pending_xref('')
|
||||
node['reftarget'] = target
|
||||
node['reftype'] = type
|
||||
node['refdomain'] = domain
|
||||
node.attributes.update(attrs)
|
||||
node += contnode
|
||||
return node, contnode
|
||||
|
||||
def reference_check(*args, **kwds):
|
||||
node, contnode = fake_node(*args, **kwds)
|
||||
return missing_reference(app, app.env, node, contnode)
|
||||
|
||||
# check resolution when a target is found
|
||||
rn = reference_check('py', 'func', 'module1.func', 'foo')
|
||||
assert isinstance(rn, nodes.reference)
|
||||
assert rn['refuri'] == 'http://docs.python.org/sub/foo.html#module1.func'
|
||||
assert rn['reftitle'] == '(in foo v2.0)'
|
||||
assert rn[0] is contnode
|
||||
assert rn[0].astext() == 'foo'
|
||||
|
||||
# create unresolvable nodes and check None return value
|
||||
refnode['reftype'] = 'foo'
|
||||
assert missing_reference(app, app.env, refnode, contnode) is None
|
||||
assert reference_check('py', 'foo', 'module1.func', 'foo') is None
|
||||
assert reference_check('py', 'func', 'foo', 'foo') is None
|
||||
assert reference_check('py', 'func', 'foo', 'foo') is None
|
||||
|
||||
refnode['reftype'] = 'function'
|
||||
refnode['reftarget'] = 'foo.func'
|
||||
assert missing_reference(app, app.env, refnode, contnode) is None
|
||||
# check handling of prefixes
|
||||
|
||||
# prefix given, target found: prefix is stripped
|
||||
rn = reference_check('py', 'mod', 'py3k:module2', 'py3k:module2')
|
||||
assert rn[0].astext() == 'module2'
|
||||
|
||||
# prefix given, but not in title: nothing stripped
|
||||
rn = reference_check('py', 'mod', 'py3k:module2', 'module2')
|
||||
assert rn[0].astext() == 'module2'
|
||||
|
||||
# prefix given, but explicit: nothing stripped
|
||||
rn = reference_check('py', 'mod', 'py3k:module2', 'py3k:module2',
|
||||
refexplicit=True)
|
||||
assert rn[0].astext() == 'py3k:module2'
|
||||
|
||||
# prefix given, target not found and nonexplicit title: prefix is stripped
|
||||
node, contnode = fake_node('py', 'mod', 'py3k:unknown', 'py3k:unknown',
|
||||
refexplicit=False)
|
||||
rn = missing_reference(app, app.env, node, contnode)
|
||||
assert rn is None
|
||||
assert contnode[0].astext() == 'unknown'
|
||||
|
||||
# prefix given, target not found and explicit title: nothing is changed
|
||||
node, contnode = fake_node('py', 'mod', 'py3k:unknown', 'py3k:unknown',
|
||||
refexplicit=True)
|
||||
rn = missing_reference(app, app.env, node, contnode)
|
||||
assert rn is None
|
||||
assert contnode[0].astext() == 'py3k:unknown'
|
||||
|
@ -17,6 +17,7 @@ from docutils import frontend, utils, nodes
|
||||
from docutils.parsers import rst
|
||||
|
||||
from sphinx.util import texescape
|
||||
from sphinx.util.pycompat import b
|
||||
from sphinx.writers.html import HTMLWriter, SmartyPantsHTMLTranslator
|
||||
from sphinx.writers.latex import LaTeXWriter, LaTeXTranslator
|
||||
|
||||
@ -50,7 +51,7 @@ class ForgivingLaTeXTranslator(LaTeXTranslator, ForgivingTranslator):
|
||||
|
||||
|
||||
def verify_re(rst, html_expected, latex_expected):
|
||||
document = utils.new_document('test data', settings)
|
||||
document = utils.new_document(b('test data'), settings)
|
||||
document['file'] = 'dummy'
|
||||
parser.parse(rst, document)
|
||||
for msg in document.traverse(nodes.system_message):
|
||||
|
@ -36,8 +36,13 @@ def mock_raw_input(answers, needanswer=False):
|
||||
return ''
|
||||
return raw_input
|
||||
|
||||
try:
|
||||
real_raw_input = raw_input
|
||||
except NameError:
|
||||
real_raw_input = input
|
||||
|
||||
def teardown_module():
|
||||
qs.raw_input = __builtin__.raw_input
|
||||
qs.term_input = real_raw_input
|
||||
qs.TERM_ENCODING = getattr(sys.stdin, 'encoding', None)
|
||||
coloron()
|
||||
|
||||
@ -51,7 +56,7 @@ def test_do_prompt():
|
||||
'Q5': 'no',
|
||||
'Q6': 'foo',
|
||||
}
|
||||
qs.raw_input = mock_raw_input(answers)
|
||||
qs.term_input = mock_raw_input(answers)
|
||||
try:
|
||||
qs.do_prompt(d, 'k1', 'Q1')
|
||||
except AssertionError:
|
||||
@ -79,13 +84,18 @@ def test_quickstart_defaults(tempdir):
|
||||
'Author name': 'Georg Brandl',
|
||||
'Project version': '0.1',
|
||||
}
|
||||
qs.raw_input = mock_raw_input(answers)
|
||||
qs.term_input = mock_raw_input(answers)
|
||||
qs.inner_main([])
|
||||
|
||||
conffile = tempdir / 'conf.py'
|
||||
assert conffile.isfile()
|
||||
ns = {}
|
||||
execfile(conffile, ns)
|
||||
f = open(conffile, 'U')
|
||||
try:
|
||||
code = compile(f.read(), conffile, 'exec')
|
||||
finally:
|
||||
f.close()
|
||||
exec code in ns
|
||||
assert ns['extensions'] == []
|
||||
assert ns['templates_path'] == ['_templates']
|
||||
assert ns['source_suffix'] == '.rst'
|
||||
@ -112,8 +122,8 @@ def test_quickstart_all_answers(tempdir):
|
||||
'Root path': tempdir,
|
||||
'Separate source and build': 'y',
|
||||
'Name prefix for templates': '.',
|
||||
'Project name': 'STASI\xe2\x84\xa2',
|
||||
'Author name': 'Wolfgang Sch\xc3\xa4uble & G\'Beckstein',
|
||||
'Project name': u'STASI™'.encode('utf-8'),
|
||||
'Author name': u'Wolfgang Schäuble & G\'Beckstein'.encode('utf-8'),
|
||||
'Project version': '2.0',
|
||||
'Project release': '2.0.1',
|
||||
'Source file suffix': '.txt',
|
||||
@ -131,14 +141,19 @@ def test_quickstart_all_answers(tempdir):
|
||||
'Create Windows command file': 'no',
|
||||
'Do you want to use the epub builder': 'yes',
|
||||
}
|
||||
qs.raw_input = mock_raw_input(answers, needanswer=True)
|
||||
qs.term_input = mock_raw_input(answers, needanswer=True)
|
||||
qs.TERM_ENCODING = 'utf-8'
|
||||
qs.inner_main([])
|
||||
|
||||
conffile = tempdir / 'source' / 'conf.py'
|
||||
assert conffile.isfile()
|
||||
ns = {}
|
||||
execfile(conffile, ns)
|
||||
f = open(conffile, 'U')
|
||||
try:
|
||||
code = compile(f.read(), conffile, 'exec')
|
||||
finally:
|
||||
f.close()
|
||||
exec code in ns
|
||||
assert ns['extensions'] == ['sphinx.ext.autodoc', 'sphinx.ext.doctest']
|
||||
assert ns['templates_path'] == ['.templates']
|
||||
assert ns['source_suffix'] == '.txt'
|
||||
|
@ -13,6 +13,7 @@ from docutils import frontend, utils
|
||||
from docutils.parsers import rst
|
||||
|
||||
from sphinx.search import IndexBuilder
|
||||
from sphinx.util.pycompat import b
|
||||
|
||||
|
||||
settings = parser = None
|
||||
@ -31,7 +32,7 @@ test that non-comments are indexed: fermion
|
||||
'''
|
||||
|
||||
def test_wordcollector():
|
||||
doc = utils.new_document('test data', settings)
|
||||
doc = utils.new_document(b('test data'), settings)
|
||||
doc['file'] = 'dummy'
|
||||
parser.parse(FILE_CONTENTS, doc)
|
||||
|
||||
|
@ -11,6 +11,8 @@ import sys
|
||||
import StringIO
|
||||
import tempfile
|
||||
import shutil
|
||||
import re
|
||||
from codecs import open
|
||||
|
||||
try:
|
||||
from functools import wraps
|
||||
@ -31,7 +33,7 @@ __all__ = [
|
||||
'raises', 'raises_msg', 'Struct',
|
||||
'ListOutput', 'TestApp', 'with_app', 'gen_with_app',
|
||||
'path', 'with_tempdir', 'write_file',
|
||||
'sprint',
|
||||
'sprint', 'remove_unicode_literals',
|
||||
]
|
||||
|
||||
|
||||
@ -191,11 +193,21 @@ def with_tempdir(func):
|
||||
return new_func
|
||||
|
||||
|
||||
def write_file(name, contents):
|
||||
f = open(str(name), 'wb')
|
||||
def write_file(name, contents, encoding=None):
|
||||
if encoding is None:
|
||||
mode = 'wb'
|
||||
if isinstance(contents, unicode):
|
||||
contents = contents.encode('ascii')
|
||||
else:
|
||||
mode = 'w'
|
||||
f = open(str(name), 'wb', encoding=encoding)
|
||||
f.write(contents)
|
||||
f.close()
|
||||
|
||||
|
||||
def sprint(*args):
|
||||
sys.stderr.write(' '.join(map(str, args)) + '\n')
|
||||
|
||||
_unicode_literals_re = re.compile(r'u(".*?")|u(\'.*?\')')
|
||||
def remove_unicode_literals(s):
|
||||
return _unicode_literals_re.sub(lambda x: x.group(1) or x.group(2), s)
|
||||
|
tox.ini | 17 (new file)
@ -0,0 +1,17 @@
[tox]
envlist=du07,du06,du05

[testenv]
deps=nose
commands=
nosetests
sphinx-build -W -b html -d {envtmpdir}/doctrees doc {envtmpdir}/html

[testenv:du05]
deps=docutils==0.5

[testenv:du06]
deps=docutils==0.6

[testenv:du07]
deps=docutils==0.7
Some files were not shown because too many files have changed in this diff.