Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Small code style changes, remove unused imports.
commit 6ffec123a9
parent 0ff840dc48

@@ -311,6 +311,7 @@ class Builder(object):
         """
         Cleanup any resources. The default implementation does nothing.
         """
+        pass


 BUILTIN_BUILDERS = {

@@ -299,6 +299,7 @@ def format_exception_cut_frames(x=1):
     res += traceback.format_exception_only(typ, val)
     return ''.join(res)

+
 class PeekableIterator(object):
     """
     An iterator which wraps any iterable and makes it possible to peek to see

@@ -312,24 +313,19 @@ class PeekableIterator(object):
         return self

     def next(self):
-        """
-        Returns the next item from the iterator.
-        """
+        """Return the next item from the iterator."""
         if self.remaining:
             return self.remaining.popleft()
         return self._iterator.next()

     def push(self, item):
-        """
-        Pushes the `item` on the internal stack, it will be returned on the
+        """Push the `item` on the internal stack, it will be returned on the
         next :meth:`next` call.
         """
         self.remaining.append(item)

     def peek(self):
-        """
-        Returns the next item without changing the state of the iterator.
-        """
+        """Return the next item without changing the state of the iterator."""
        item = self.next()
        self.push(item)
        return item

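A quick usage sketch of the iterator above (the constructor signature is assumed to take the iterable to wrap; note the Python 2 style next() method):

from sphinx.util import PeekableIterator

it = PeekableIterator([1, 2, 3])   # constructor signature assumed
assert it.peek() == 1              # peek() pops the next item and pushes it back
assert it.next() == 1              # so next() still returns it afterwards
it.push(42)                        # pushed items come back before the wrapped iterable
assert it.next() == 42
assert it.next() == 2
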
@@ -18,15 +18,14 @@ try:
 except ImportError:
     from itertools import zip_longest

-from sphinx.util import PeekableIterator

 # anything below that ratio is considered equal/changed
 VERSIONING_RATIO = 65

+
 def add_uids(doctree, condition):
-    """
-    Adds a unique id to every node in the `doctree` which matches the condition
-    and yields it.
+    """Add a unique id to every node in the `doctree` which matches the
+    condition and yield the nodes.

     :param doctree:
         A :class:`docutils.nodes.document` instance.

@@ -38,10 +37,10 @@ def add_uids(doctree, condition):
         node.uid = uuid4().hex
         yield node

+
 def merge_doctrees(old, new, condition):
-    """
-    Merges the `old` doctree with the `new` one while looking at nodes matching
-    the `condition`.
+    """Merge the `old` doctree with the `new` one while looking at nodes
+    matching the `condition`.

     Each node which replaces another one or has been added to the `new` doctree
     will be yielded.

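A hedged usage sketch for the two generators above; the reST snippets and the is_paragraph condition are illustrative, and the docutils parsing boilerplate is just the standard recipe:

from docutils import nodes
from docutils.frontend import OptionParser
from docutils.parsers.rst import Parser
from docutils.utils import new_document

from sphinx.versioning import add_uids, merge_doctrees

def parse(text):
    # standard docutils recipe for turning reST source into a doctree
    settings = OptionParser(components=(Parser,)).get_default_values()
    document = new_document('<versioning-demo>', settings)
    Parser().parse(text, document)
    return document

def is_paragraph(node):
    return node.__class__ is nodes.paragraph

old = parse('Spam.\n\nHam.\n')
new = parse('Spam.\n\nEggs.\n')

# first build: give every paragraph in the original doctree a uid
list(add_uids(old, is_paragraph))
# rebuild: similar paragraphs keep their uid; added or replaced paragraphs
# in `new` are yielded and receive fresh uids
changed = list(merge_doctrees(old, new, is_paragraph))
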
@@ -102,16 +101,18 @@ def merge_doctrees(old, new, condition):
         new_node.uid = uuid4().hex
         yield new_node

+
 def get_ratio(old, new):
-    """
-    Returns a "similiarity ratio" representing the similarity between the two
-    strings where 0 is equal and anything above less than equal.
+    """Return a "similiarity ratio" (in percent) representing the similarity
+    between the two strings where 0 is equal and anything above less than equal.
     """
     if not all([old, new]):
         return VERSIONING_RATIO
     return levenshtein_distance(old, new) / (len(old) / 100.0)

+
 def levenshtein_distance(a, b):
+    """Return the Levenshtein edit distance between two strings *a* and *b*."""
     if a == b:
         return 0
     if len(a) < len(b):

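A worked example of the ratio above (the distance value assumes the usual Levenshtein definition):

old, new = 'kitten', 'sitting'
distance = 3                            # levenshtein_distance(old, new)
ratio = distance / (len(old) / 100.0)   # 3 / 0.06 == 50.0
# 50.0 is below VERSIONING_RATIO (65), so the strings count as the same
# node that merely changed, not as two different nodes
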
@@ -13,17 +13,17 @@ import sys
 import cPickle as pickle
 import posixpath
 from os import path
-from datetime import datetime

 from jinja2 import Environment, FileSystemLoader

 from sphinx.application import Sphinx
 from sphinx.util.osutil import ensuredir
 from sphinx.util.jsonimpl import dumps as dump_json
-from sphinx.websupport.search import BaseSearch, search_adapters
+from sphinx.websupport.search import BaseSearch, SEARCH_ADAPTERS
 from sphinx.websupport.storage import StorageBackend
 from sphinx.websupport.errors import *

+
 class WebSupportApp(Sphinx):
     def __init__(self, *args, **kwargs):
         self.staticdir = kwargs.pop('staticdir', None)

@@ -32,6 +32,7 @@ class WebSupportApp(Sphinx):
         self.storage = kwargs.pop('storage', None)
         Sphinx.__init__(self, *args, **kwargs)

+
 class WebSupport(object):
     """The main API class for the web support package. All interactions
     with the web support package should occur through this class.

@@ -82,7 +83,7 @@ class WebSupport(object):
         if isinstance(search, BaseSearch):
             self.search = search
         else:
-            mod, cls = search_adapters[search or 'null']
+            mod, cls = SEARCH_ADAPTERS[search or 'null']
             mod = 'sphinx.websupport.search.' + mod
             SearchClass = getattr(__import__(mod, None, None, [cls]), cls)
             search_path = path.join(self.datadir, 'search')

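Step by step, the renamed lookup above does the following for search='whoosh' (the importlib spelling is an equivalent alternative, not what the code uses):

from sphinx.websupport.search import SEARCH_ADAPTERS

mod, cls = SEARCH_ADAPTERS['whoosh']       # ('whooshsearch', 'WhooshSearch')
mod = 'sphinx.websupport.search.' + mod    # 'sphinx.websupport.search.whooshsearch'
SearchClass = getattr(__import__(mod, None, None, [cls]), cls)

# equivalent on newer Python:
# import importlib
# SearchClass = getattr(importlib.import_module(mod), cls)
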
@@ -390,7 +391,8 @@ class WebSupport(object):
         :param username: The username of the user making the request.
         :param moderator: Whether the user making the request is a moderator.
         """
-        parts = [self.base_comment_opts]
+        # XXX parts is not used?
+        #parts = [self.base_comment_opts]
         rv = self.base_comment_opts.copy()
         if username:
             rv.update({

@@ -11,6 +11,7 @@

 import re

+
 class BaseSearch(object):
     def __init__(self, path):
         pass

@@ -63,11 +64,10 @@ class BaseSearch(object):

     def query(self, q):
         """Called by the web support api to get search results. This method
-        compiles the regular expression to be used when
-        :meth:`extracting context <extract_context>`, then calls
-        :meth:`handle_query`. You won't want to override this unless you
-        don't want to use the included :meth:`extract_context` method.
-        Override :meth:`handle_query` instead.
+        compiles the regular expression to be used when :meth:`extracting
+        context <extract_context>`, then calls :meth:`handle_query`. You
+        won't want to override this unless you don't want to use the included
+        :meth:`extract_context` method. Override :meth:`handle_query` instead.

         :param q: the search query string.
         """

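A sketch of what that guidance implies for a custom adapter: override handle_query() and the indexing hooks rather than query(). The add_document() hook and the shape of the results yielded here are assumptions based on the surrounding docstrings, not the exact interface:

from sphinx.websupport.search import BaseSearch

class InMemorySearch(BaseSearch):
    """Toy adapter: keeps page text in a dict instead of a real index."""

    def __init__(self, path):
        self.pages = {}

    def add_document(self, pagename, title, text):   # hook name assumed
        self.pages[pagename] = (title, text)

    def handle_query(self, q):
        # query() has already compiled the context regex used by extract_context()
        for pagename, (title, text) in self.pages.items():
            if q.lower() in text.lower():
                yield pagename, title, self.extract_context(text)
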
@@ -93,7 +93,7 @@ class BaseSearch(object):
         raise NotImplementedError()

     def extract_context(self, text, length=240):
-        """Extract the context for the search query from the documents
+        """Extract the context for the search query from the document's
         full `text`.

         :param text: the full text of the document to create the context for

|
|||||||
except TypeError:
|
except TypeError:
|
||||||
return context
|
return context
|
||||||
|
|
||||||
# The build in search adapters.
|
# The built-in search adapters.
|
||||||
search_adapters = {
|
SEARCH_ADAPTERS = {
|
||||||
'xapian': ('xapiansearch', 'XapianSearch'),
|
'xapian': ('xapiansearch', 'XapianSearch'),
|
||||||
'whoosh': ('whooshsearch', 'WhooshSearch'),
|
'whoosh': ('whooshsearch', 'WhooshSearch'),
|
||||||
'null': ('nullsearch', 'NullSearch')
|
'null': ('nullsearch', 'NullSearch'),
|
||||||
}
|
}
|
||||||
|
@@ -10,7 +10,8 @@
 """

 from sphinx.websupport.search import BaseSearch
-from sphinx.websupport.errors import *
+from sphinx.websupport.errors import NullSearchException

+
 class NullSearch(BaseSearch):
     """A search adapter that does nothing. Used when no search adapter

@@ -10,13 +10,13 @@
 """

 from whoosh import index
-from whoosh.fields import Schema, ID, TEXT, STORED
+from whoosh.fields import Schema, ID, TEXT
 from whoosh.analysis import StemmingAnalyzer
-from whoosh import highlight

 from sphinx.util.osutil import ensuredir
 from sphinx.websupport.search import BaseSearch

+
 class WhooshSearch(BaseSearch):
     """The whoosh search adapter for sphinx web support."""

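For context on the trimmed imports above (STORED and highlight were unused), a sketch of the kind of schema the remaining whoosh imports support; the field names and index path are illustrative, not the adapter's actual schema:

import os

from whoosh import index
from whoosh.fields import Schema, ID, TEXT
from whoosh.analysis import StemmingAnalyzer

schema = Schema(path=ID(stored=True, unique=True),
                text=TEXT(analyzer=StemmingAnalyzer()))

index_dir = '/tmp/websupport-whoosh'
if not os.path.exists(index_dir):
    os.makedirs(index_dir)
ix = index.create_in(index_dir, schema)
writer = ix.writer()
writer.add_document(path=u'contents', text=u'Some page text to index.')
writer.commit()
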
@@ -9,13 +9,12 @@
     :license: BSD, see LICENSE for details.
 """

-from os import path
-
 import xapian

 from sphinx.util.osutil import ensuredir
 from sphinx.websupport.search import BaseSearch

+
 class XapianSearch(BaseSearch):
     # Adapted from the GSOC 2009 webapp project.

@@ -11,11 +11,9 @@
 """

 from datetime import datetime
-from uuid import uuid4

-from sqlalchemy import Column, Integer, Text, String, Boolean, ForeignKey,\
-    DateTime
-from sqlalchemy.schema import UniqueConstraint
+from sqlalchemy import Column, Integer, Text, String, Boolean, \
+    ForeignKey, DateTime
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import relation, sessionmaker, aliased

@@ -98,6 +96,7 @@ class Node(Base):
         self.document = document
         self.source = source

+
 class Comment(Base):
     """An individual Comment being stored."""
     __tablename__ = db_prefix + 'comments'

@@ -188,6 +187,7 @@ class Comment(Base):

         return '%s %s ago' % dt if dt[0] == 1 else '%s %ss ago' % dt

+
 class CommentVote(Base):
     """A vote a user has made on a Comment."""
     __tablename__ = db_prefix + 'commentvote'

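The conditional expression above formats what is presumably a (count, unit-name) tuple built earlier in the method, for example:

dt = (1, 'hour')
assert ('%s %s ago' % dt if dt[0] == 1 else '%s %ss ago' % dt) == '1 hour ago'

dt = (5, 'minute')
assert ('%s %s ago' % dt if dt[0] == 1 else '%s %ss ago' % dt) == '5 minutes ago'
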
@@ -13,6 +13,7 @@ import re
 from cgi import escape
 from difflib import Differ

+
 class CombinedHtmlDiff(object):
     """Create an HTML representation of the differences between two pieces
     of text.

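A rough sketch of the idea behind CombinedHtmlDiff, not its actual interface: difflib.Differ marks each line with '  ', '- ' or '+ ', which can be wrapped in <del>/<ins> tags to produce one combined HTML view.

from cgi import escape
from difflib import Differ

def combined_html_diff_sketch(old_text, new_text):
    html = []
    for line in Differ().compare(old_text.splitlines(), new_text.splitlines()):
        text = escape(line[2:])
        if line.startswith('+ '):
            html.append('<ins>%s</ins>' % text)
        elif line.startswith('- '):
            html.append('<del>%s</del>' % text)
        elif line.startswith('  '):
            html.append(text)
        # '? ' hint lines from Differ are dropped
    return '<br />\n'.join(html)
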
@@ -14,16 +14,19 @@ from datetime import datetime
 from sqlalchemy.orm import aliased
 from sqlalchemy.sql import func

-from sphinx.websupport.errors import *
+from sphinx.websupport.errors import CommentNotAllowedError, \
+    UserNotAuthorizedError
 from sphinx.websupport.storage import StorageBackend
-from sphinx.websupport.storage.db import Base, Node, Comment, CommentVote,\
-    Session
+from sphinx.websupport.storage.db import Base, Node, Comment, \
+    CommentVote, Session
 from sphinx.websupport.storage.differ import CombinedHtmlDiff

+
 class SQLAlchemyStorage(StorageBackend):
-    """A :class:`~sphinx.websupport.storage.StorageBackend` using
-    SQLAlchemy.
     """
+    A :class:`.StorageBackend` using SQLAlchemy.
+    """
+
     def __init__(self, engine):
         self.engine = engine
         Base.metadata.bind = engine

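A minimal wiring sketch for the constructor shown above (the module path and the sqlite URL are assumptions for illustration):

from sqlalchemy import create_engine
from sphinx.websupport.storage.sqlalchemystorage import SQLAlchemyStorage

engine = create_engine('sqlite:///websupport.db')
storage = SQLAlchemyStorage(engine)   # __init__ binds Base.metadata to the engine
# presumably handed to the web support layer afterwards, e.g. via its
# `storage` argument (see the kwargs handling in WebSupportApp above)
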
@@ -147,6 +150,7 @@ class SQLAlchemyStorage(StorageBackend):
     def accept_comment(self, comment_id):
         session = Session()

+        # XXX assignment to "comment" needed?
         comment = session.query(Comment).filter(
             Comment.id == comment_id).update(
             {Comment.displayed: True})

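On that XXX: Query.update() returns the number of matched rows, not a Comment instance, so the assignment can simply be dropped. A sketch (the trailing commit() is assumed from surrounding code the hunk does not show):

    def accept_comment(self, comment_id):
        session = Session()
        session.query(Comment).filter(
            Comment.id == comment_id).update({Comment.displayed: True})
        session.commit()
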