Compare commits
master...feature/au (35 commits)

Commits (SHA1):
1f051bfabc
5da9f4a410
ea5aa0e80f
090c01bf8a
d41ab31b3d
8550859366
f42e70c768
8d71ad6c1c
701ea7058c
2001ed423b
bf5fc622ba
b2a34caa66
7bf46d9b53
33741b9d2d
9d63498def
2347b41647
7174c6a686
43d8ae702d
9aa1e9544d
131951c976
7a3251ddfc
7886c20aef
d472262fee
80cbf2676f
4f152d03ac
10960ecf1e
1e542e612a
a551271743
88689c789a
bbd9e1b523
b2bc582f92
5f71b735e5
bffcc3af6f
1a4e760fe0
65caa7ad99

.gitignore  (3 additions)

@@ -24,3 +24,6 @@
 /docs/_build
 /docs/_static/css/site.css
+
+/build
+/dist

@@ -1,15 +1,10 @@
 language: python
 python: 2.7
 env:
-- TOX_ENV=py26
 - TOX_ENV=py27
 - TOX_ENV=py33
 - TOX_ENV=py34
-- TOX_ENV=squeeze
 - TOX_ENV=wheezy
-matrix:
-  allow_failures:
-    - env: TOX_ENV=squeeze
 install:
 - pip install tox
 - sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm

@@ -1,9 +1,12 @@
 include man/man1/isso.1
 include man/man5/isso.conf.5
 
+include isso/defaults.ini
 include share/isso.conf
 
 include isso/js/embed.min.js
 include isso/js/embed.dev.js
 include isso/js/count.min.js
 include isso/js/count.dev.js
+
+include isso/demo/index.html

@@ -218,7 +218,7 @@ enabled
 purposes.
 
 ratelimit
-    limit to N new comments per minute.
+    limit to N new comments per minute. Use -1 to disable rate limit.
 
 direct-reply
     how many comments directly to the thread (prevent a simple

@@ -72,7 +72,7 @@ Install from PyPi
 Requirements
 ^^^^^^^^^^^^
 
-- Python 2.6, 2.7 or 3.3+ (+ devel headers)
+- Python 2.7, 3.3 or 3.4+ (+ devel headers)
 - SQLite 3.3.8 or later
 - a working C compiler
 

isso/__init__.py  (143 lines changed)

@@ -41,16 +41,14 @@ if sys.argv[0].startswith("isso"):
 
 import os
 import errno
+import atexit
 import logging
-import tempfile
 
 from os.path import dirname, join
 from argparse import ArgumentParser
 from functools import partial, reduce
 
-from itsdangerous import URLSafeTimedSerializer
+from werkzeug.routing import Map, Rule, redirect
 
-from werkzeug.routing import Map
 from werkzeug.exceptions import HTTPException, InternalServerError
 
 from werkzeug.wsgi import SharedDataMiddleware
@@ -62,11 +60,14 @@ from werkzeug.contrib.profiler import ProfilerMiddleware
 local = Local()
 local_manager = LocalManager([local])
 
-from isso import config, db, migrate, wsgi, ext, views
-from isso.core import ThreadedMixin, ProcessMixin, uWSGIMixin
+try:
+    import uwsgi
+except ImportError:
+    uwsgi = None
 
+from isso import cache, config, db, migrate, ext, queue, spam, views, wsgi
 from isso.wsgi import origin, urlsplit
-from isso.utils import http, JSONRequest, html, hash
-from isso.views import comments
+from isso.utils import http, JSONRequest, html, hash, URLSafeTimedSerializer
 
 from isso.ext.notifications import Stdout, SMTP
 
@@ -80,40 +81,64 @@ logger = logging.getLogger("isso")
 
 class Isso(object):
 
-    def __init__(self, conf):
+    def __init__(self, conf, cacheobj=None, dbobj=None):
+
+        if cacheobj is None:
+            cacheobj = cache.Cache(1024)
+
+        if dbobj is None:
+            dbobj = db.Adapter("sqlite:///:memory:")
 
         self.conf = conf
-        self.db = db.SQLite3(conf.get('general', 'dbpath'), conf)
-        self.signer = URLSafeTimedSerializer(self.db.preferences.get("session-key"))
-        self.markup = html.Markup(conf.section('markup'))
-        self.hasher = hash.new(conf.section("hash"))
-
-        super(Isso, self).__init__(conf)
-
-        subscribers = []
-        for backend in conf.getlist("general", "notify"):
-            if backend == "stdout":
-                subscribers.append(Stdout(None))
-            elif backend in ("smtp", "SMTP"):
-                subscribers.append(SMTP(self))
-            else:
-                logger.warn("unknown notification backend '%s'", backend)
-
-        self.signal = ext.Signal(*subscribers)
-
-        self.urls = Map()
-
-        views.Info(self)
-        comments.API(self, self.hasher)
-
-    def render(self, text):
-        return self.markup.render(text)
-
-    def sign(self, obj):
-        return self.signer.dumps(obj)
-
-    def unsign(self, obj, max_age=None):
-        return self.signer.loads(obj, max_age=max_age or self.conf.getint('general', 'max-age'))
+        self.db = dbobj
+
+        signer = URLSafeTimedSerializer(
+            dbobj.preferences.get("session-key"))
+        markup = html.Markup(
+            conf.getlist("markup", "options"),
+            conf.getlist("markup", "allowed-elements"),
+            conf.getlist("markup", "allowed-attributes"))
+        hasher = hash.new(
+            conf.get("hash", "algorithm"),
+            conf.get("hash", "salt"))
+        guard = spam.Guard(
+            dbobj,
+            conf.getboolean("guard", "enabled"),
+            conf.getint("guard", "ratelimit"),
+            conf.getint("guard", "direct-reply"),
+            conf.getboolean("guard", "reply-to-self"),
+            conf.getint("general", "max-age"))
+
+        urls = Map()
+        Isso.routes(
+            urls,
+            views.API(conf, cacheobj, dbobj, guard, hasher.uhash, markup, signer),
+            views.Info(conf))
+
+        self.urls = urls
+
+    @classmethod
+    def routes(cls, urls, api, info):
+
+        for rule in [
+            Rule("/demo/", endpoint=lambda *z: redirect("/demo/index.html")),
+            Rule("/info", endpoint=info.show)
+        ]:
+            urls.add(rule)
+
+        for func, (method, rule) in [
+            ('fetch', ('GET', '/')),
+            ('new', ('POST', '/new')),
+            ('count', ('POST', '/count')),
+            ('view', ('GET', '/id/<int:id>')),
+            ('edit', ('PUT', '/id/<int:id>')),
+            ('delete', ('DELETE', '/id/<int:id>')),
+            ('moderate',('GET', '/id/<int:id>/<any(activate,delete):action>/<string:key>')),
+            ('moderate',('POST', '/id/<int:id>/<any(activate,delete):action>/<string:key>')),
+            ('like', ('POST', '/id/<int:id>/like')),
+            ('dislike', ('POST', '/id/<int:id>/dislike')),
+        ]:
+            urls.add(Rule(rule, methods=[method], endpoint=getattr(api, func)))
 
     def dispatch(self, request):
         local.request = request
@@ -146,22 +171,25 @@ class Isso(object):
         return self.wsgi_app(environ, start_response)
 
 
-def make_app(conf=None, threading=True, multiprocessing=False, uwsgi=False):
+def make_app(conf):
 
-    if not any((threading, multiprocessing, uwsgi)):
-        raise RuntimeError("either set threading, multiprocessing or uwsgi")
+    dbobj = db.Adapter(conf.get("general", "dbpath"))
 
-    if threading:
-        class App(Isso, ThreadedMixin):
-            pass
-    elif multiprocessing:
-        class App(Isso, ProcessMixin):
-            pass
+    if uwsgi is not None:
+        cacheobj = cache.uWSGICache(timeout=3600)
     else:
-        class App(Isso, uWSGIMixin):
-            pass
+        cacheobj = cache.SQLite3Cache(db.SQLite3("/dev/shm/isso"), threshold=2048)
 
-    isso = App(conf)
+    jobs = queue.Jobs()
+    jobs.register("db-purge", dbobj, conf.getint("moderation", "purge-after"))
+
+    queueobj = queue.Queue(1024)
+    worker = queue.Worker(queueobj, jobs)
+
+    isso = Isso(conf, cacheobj, dbobj)
+
+    atexit.register(worker.join, 0.25)
+    worker.start()
 
     # check HTTP server connection
     for host in conf.getiter("general", "host"):
@@ -176,7 +204,7 @@ def make_app(conf=None, threading=True, multiprocessing=False, uwsgi=False):
 
     wrapper = [local_manager.make_middleware]
 
-    if isso.conf.getboolean("server", "profile"):
+    if conf.getboolean("server", "profile"):
         wrapper.append(partial(ProfilerMiddleware,
             sort_by=("cumulative", ), restrictions=("isso/(?!lib)", 10)))
 
@@ -184,10 +212,10 @@ def make_app(conf=None, threading=True, multiprocessing=False, uwsgi=False):
         '/js': join(dirname(__file__), 'js/'),
         '/css': join(dirname(__file__), 'css/'),
         '/demo': join(dirname(__file__), 'demo/')
         }))
 
     wrapper.append(partial(wsgi.CORSMiddleware,
-        origin=origin(isso.conf.getiter("general", "host")),
+        origin=origin(conf.getiter("general", "host")),
         allowed=("Origin", "Referer", "Content-Type"),
         exposed=("X-Set-Cookie", "Date")))
 
@@ -203,7 +231,7 @@ def main():
 
     parser.add_argument('--version', action='version', version='%(prog)s ' + dist.version)
     parser.add_argument("-c", dest="conf", default="/etc/isso.conf",
         metavar="/etc/isso.conf", help="set configuration file")
 
     imprt = subparser.add_parser('import', help="import Disqus XML export")
     imprt.add_argument("dump", metavar="FILE")
@@ -215,18 +243,17 @@ def main():
     serve = subparser.add_parser("run", help="run server")
 
     args = parser.parse_args()
-    conf = config.load(join(dist.location, "share", "isso.conf"), args.conf)
+    conf = config.load(join(dist.location, "isso", "defaults.ini"), args.conf)
 
     if args.command == "import":
         conf.set("guard", "enabled", "off")
 
         if args.dryrun:
-            xxx = tempfile.NamedTemporaryFile()
-            dbpath = xxx.name
+            dbpath = ":memory:"
         else:
             dbpath = conf.get("general", "dbpath")
 
-        mydb = db.SQLite3(dbpath, conf)
+        mydb = db.Adapter(db.SQLite3(dbpath), conf)
         migrate.dispatch(args.type, mydb, args.dump)
 
         sys.exit(0)
@@ -235,13 +262,15 @@ def main():
         logger.error("No website(s) configured, Isso won't work.")
         sys.exit(1)
 
+    app = make_app(conf)
+
     if conf.get("server", "listen").startswith("http://"):
         host, port, _ = urlsplit(conf.get("server", "listen"))
         try:
             from gevent.pywsgi import WSGIServer
-            WSGIServer((host, port), make_app(conf)).serve_forever()
+            WSGIServer((host, port), app).serve_forever()
         except ImportError:
-            run_simple(host, port, make_app(conf), threaded=True,
+            run_simple(host, port, app, threaded=True,
                 use_reloader=conf.getboolean('server', 'reload'))
     else:
         sock = conf.get("server", "listen").partition("unix://")[2]
@@ -250,4 +279,4 @@ def main():
         except OSError as ex:
             if ex.errno != errno.ENOENT:
                 raise
-        wsgi.SocketHTTPServer(sock, make_app(conf)).serve_forever()
+        wsgi.SocketHTTPServer(sock, app).serve_forever()
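
The make_app() hunk above replaces the mixin-based process model with an explicit background worker. A minimal sketch of that lifecycle, using only the calls visible in the diff (the omitted job registration would additionally need a db.Adapter instance):

    import atexit

    from isso import queue

    jobs = queue.Jobs()                    # periodic jobs, e.g. "db-purge"
    queueobj = queue.Queue(1024)           # bounded in-process queue
    worker = queue.Worker(queueobj, jobs)  # consumes the queue in the background

    atexit.register(worker.join, 0.25)     # give the worker 250 ms to drain on exit
    worker.start()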

isso/cache/__init__.py  (new file, 105 lines)

@@ -0,0 +1,105 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import abc
import json

from isso.utils import types
from isso.compat import string_types


def pickle(value):
    return json.dumps(value).encode("utf-8")


def unpickle(value):
    return json.loads(value.decode("utf-8"))


class Base(object):
    """Base class for all cache objects.

    Arbitrary values are set by namespace and key. Namespace and key must be
    strings, the underlying cache implementation may use :func:`pickle` and
    :func:`unpickle:` to safely un-/serialize Python primitives.

    :param threshold: maximum size of the cache
    :param timeout: key expiration
    """

    __metaclass__ = abc.ABCMeta

    # enable serialization of Python primitives
    serialize = False

    def __init__(self, threshold, timeout):
        self.threshold = threshold
        self.timeout = timeout

    def get(self, ns, key, default=None):
        types.require(ns, string_types)
        types.require(key, string_types)

        try:
            value = self._get(ns.encode("utf-8"), key.encode("utf-8"))
        except KeyError:
            return default
        else:
            if self.serialize:
                value = unpickle(value)
            return value

    @abc.abstractmethod
    def _get(self, ns, key):
        return

    def set(self, ns, key, value):
        types.require(ns, string_types)
        types.require(key, string_types)

        if self.serialize:
            value = pickle(value)

        return self._set(ns.encode("utf-8"), key.encode("utf-8"), value)

    @abc.abstractmethod
    def _set(self, ns, key, value):
        return

    def delete(self, ns, key):
        types.require(ns, string_types)
        types.require(key, string_types)

        return self._delete(ns.encode("utf-8"), key.encode("utf-8"))

    @abc.abstractmethod
    def _delete(self, ns, key):
        return


class Cache(Base):
    """Implements a simple in-memory cache; once the threshold is reached, all
    cached elements are discarded (the timeout parameter is ignored).
    """

    def __init__(self, threshold=512, timeout=-1):
        super(Cache, self).__init__(threshold, timeout)
        self.cache = {}

    def _get(self, ns, key):
        return self.cache[ns + b'-' + key]

    def _set(self, ns, key, value):
        if len(self.cache) > self.threshold - 1:
            self.cache.clear()
        self.cache[ns + b'-' + key] = value

    def _delete(self, ns, key):
        self.cache.pop(ns + b'-' + key, None)


from .uwsgi import uWSGICache
from .sqlite import SQLite3Cache

__all__ = ["Cache", "SQLite3Cache", "uWSGICache"]
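
A short usage sketch of the cache API defined above (the plain in-memory Cache keeps serialize = False, so values are stored as given); the namespace and key strings below are illustrative:

    from isso.cache import Cache

    cache = Cache(threshold=8)
    cache.set("hash", "192.168.0.1", b"0a1b2c")
    assert cache.get("hash", "192.168.0.1") == b"0a1b2c"
    assert cache.get("hash", "unknown", default=None) is None
    cache.delete("hash", "192.168.0.1")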

isso/cache/sqlite.py  (new file, 57 lines)

@@ -0,0 +1,57 @@
# -*- encoding: utf-8 -*-

from __future__ import absolute_import, unicode_literals

import time

from . import Base


class SQLite3Cache(Base):
    """Implements a shared cache using SQLite3. Works across multiple processes
    and threads, concurrent writes are not supported.

    JSON is used to serialize python primitives in a safe way.
    """

    serialize = True

    def __init__(self, connection, threshold=1024, timeout=-1):
        super(SQLite3Cache, self).__init__(threshold, timeout)
        self.connection = connection
        self.connection.execute(
            'CREATE TABLE IF NOT EXISTS cache ('
            ' key TEXT PRIMARY KEY,'
            ' value BLOB,'
            ' time FLOAT)')

        # drop trigger, in case threshold has changed
        self.connection.execute('DROP TRIGGER IF EXISTS sweeper')
        self.connection.execute([
            'CREATE TRIGGER sweeper AFTER INSERT ON cache',
            'BEGIN',
            ' DELETE FROM cache WHERE key NOT IN (',
            ' SELECT key FROM cache',
            ' ORDER BY time DESC LIMIT {0}'.format(threshold),
            ' );',
            'END'])

    def _get(self, ns, key, default=None):
        rv = self.connection.execute(
            'SELECT value FROM cache WHERE key = ?',
            (ns + b'-' + key, )).fetchone()

        if rv is None:
            raise KeyError

        return rv[0]

    def _set(self, ns, key, value):
        with self.connection.transaction as con:
            con.execute(
                'INSERT OR REPLACE INTO cache (key, value, time) VALUES (?, ?, ?)',
                (ns + b'-' + key, value, time.time()))

    def _delete(self, ns, key):
        with self.connection.transaction as con:
            con.execute('DELETE FROM cache WHERE key = ?', (ns + b'-' + key, ))
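
For reference, this is how the shared cache is wired up in the make_app() hunk earlier in this compare (the /dev/shm path comes from that hunk); with serialize = True, values survive a JSON round trip. The namespace, key and value below are illustrative:

    from isso import db
    from isso.cache import SQLite3Cache

    cacheobj = SQLite3Cache(db.SQLite3("/dev/shm/isso"), threshold=2048)
    cacheobj.set("guard", "127.0.0.1", {"comments": 3})
    print(cacheobj.get("guard", "127.0.0.1"))   # {'comments': 3} after unpickling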

isso/cache/uwsgi.py  (new file, 34 lines)

@@ -0,0 +1,34 @@
# -*- encoding: utf-8 -*-

from __future__ import absolute_import, unicode_literals

try:
    import uwsgi
except ImportError:
    uwsgi = None

from . import Base


class uWSGICache(Base):
    """Utilize uWSGI caching framework, in-memory and SMP-safe.
    """

    serialize = True

    def __init__(self, threshold=-1, timeout=3600):
        if uwsgi is None:
            raise RuntimeError("uWSGI not available")

        super(uWSGICache, self).__init__(threshold, timeout)

    def _get(self, ns, key):
        if not uwsgi.cache_exists(key, ns):
            raise KeyError
        return uwsgi.cache_get(key, ns)

    def _delete(self, ns, key):
        uwsgi.cache_del(key, ns)

    def _set(self, ns, key, value):
        uwsgi.cache_set(key, value, self.timeout, ns)

@@ -14,11 +14,6 @@ from isso.compat import text_type as str
 logger = logging.getLogger("isso")
 
 
-# Python 2.6 compatibility
-def total_seconds(td):
-    return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
-
-
 def timedelta(string):
     """
     Parse :param string: into :class:`datetime.timedelta`, you can use any
@@ -95,10 +90,7 @@ class IssoParser(ConfigParser):
         except ValueError:
             return super(IssoParser, self).getint(section, key)
         else:
-            try:
-                return int(delta.total_seconds())
-            except AttributeError:
-                return int(total_seconds(delta))
+            return int(delta.total_seconds())
 
     def getlist(self, section, key):
         return list(map(str.strip, self.get(section, key).split(',')))
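
The removed helper was a Python 2.6 shim; on the versions this branch still supports (2.7, 3.3+), datetime.timedelta provides total_seconds() natively, which is all the simplified getint() relies on:

    from datetime import timedelta

    delta = timedelta(minutes=15)
    print(int(delta.total_seconds()))   # 900, no fallback needed on Python 2.7/3.3+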

isso/controllers/__init__.py  (new file, empty)
289
isso/controllers/comments.py
Normal file
289
isso/controllers/comments.py
Normal file
@ -0,0 +1,289 @@
|
|||||||
|
# -*- encoding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
|
||||||
|
from sqlalchemy.sql import func, select, not_
|
||||||
|
|
||||||
|
from isso.spam import Guard
|
||||||
|
from isso.utils import Bloomfilter
|
||||||
|
from isso.models import Comment
|
||||||
|
|
||||||
|
from isso.compat import string_types, buffer
|
||||||
|
|
||||||
|
|
||||||
|
class Invalid(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Denied(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Validator(object):
|
||||||
|
|
||||||
|
# from Django appearently, looks good to me *duck*
|
||||||
|
__url_re = re.compile(
|
||||||
|
r'^'
|
||||||
|
r'(https?://)?'
|
||||||
|
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
|
||||||
|
r'localhost|' # localhost...
|
||||||
|
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
|
||||||
|
r'(?::\d+)?' # optional port
|
||||||
|
r'(?:/?|[/?]\S+)'
|
||||||
|
r'$', re.IGNORECASE)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def isurl(cls, text):
|
||||||
|
return Validator.__url_re.match(text) is not None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def verify(cls, comment):
|
||||||
|
|
||||||
|
if not isinstance(comment["parent"], (int, type(None))):
|
||||||
|
return False, "parent must be an integer or null"
|
||||||
|
|
||||||
|
if not isinstance(comment["text"], string_types):
|
||||||
|
return False, "text must be a string"
|
||||||
|
|
||||||
|
if len(comment["text"].rstrip()) < 3:
|
||||||
|
return False, "text is too short (minimum length: 3)"
|
||||||
|
|
||||||
|
for key in ("author", "email", "website"):
|
||||||
|
if not isinstance(comment[key], (string_types, type(None))):
|
||||||
|
return False, "%s must be a string or null" % key
|
||||||
|
|
||||||
|
if len(comment["email"] or "") > 254:
|
||||||
|
return False, "http://tools.ietf.org/html/rfc5321#section-4.5.3"
|
||||||
|
|
||||||
|
if comment["website"]:
|
||||||
|
if len(comment["website"]) > 254:
|
||||||
|
return False, "arbitrary length limit"
|
||||||
|
if not Validator.isurl(comment["website"]):
|
||||||
|
return False, "Website not Django-conform"
|
||||||
|
|
||||||
|
return True, ""
|
||||||
|
|
||||||
|
|
||||||
|
class Controller(object):
|
||||||
|
|
||||||
|
def __init__(self, db, guard=None):
|
||||||
|
if guard is None:
|
||||||
|
guard = Guard(db, enabled=False)
|
||||||
|
|
||||||
|
self.db = db
|
||||||
|
self.guard = guard
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def Comment(cls, rv):
|
||||||
|
return Comment(
|
||||||
|
id=rv[0], parent=rv[1], thread=rv[2], created=rv[3], modified=rv[4],
|
||||||
|
mode=rv[5], remote_addr=rv[6], text=rv[7], author=rv[8], email=rv[9],
|
||||||
|
website=rv[10], likes=rv[11], dislikes=rv[12], voters=Bloomfilter(bytearray(rv[13])))
|
||||||
|
|
||||||
|
def new(self, remote_addr, thread, obj, moderated=False):
|
||||||
|
obj.setdefault("text", "")
|
||||||
|
obj.setdefault("parent", None)
|
||||||
|
for field in ("email", "author", "website"):
|
||||||
|
obj.setdefault(field, None)
|
||||||
|
|
||||||
|
valid, reason = Validator.verify(obj)
|
||||||
|
if not valid:
|
||||||
|
raise Invalid(reason)
|
||||||
|
|
||||||
|
if self.guard.enabled:
|
||||||
|
valid, reason = self.guard.validate(remote_addr, thread, obj)
|
||||||
|
if not valid:
|
||||||
|
raise Denied(reason)
|
||||||
|
|
||||||
|
obj["id"] = None
|
||||||
|
obj["thread"] = thread.id
|
||||||
|
obj["mode"] = 2 if moderated else 1
|
||||||
|
obj["created"] = time.time()
|
||||||
|
obj["modified"] = None
|
||||||
|
obj["remote_addr"] = remote_addr
|
||||||
|
|
||||||
|
obj["likes"] = obj["dislikes"] = 0
|
||||||
|
obj["voters"] = Bloomfilter(iterable=[remote_addr])
|
||||||
|
|
||||||
|
if obj["parent"] is not None:
|
||||||
|
parent = self.get(obj["parent"])
|
||||||
|
if parent is None:
|
||||||
|
obj["parent"] = None
|
||||||
|
elif parent.parent: # normalize to max depth of 1
|
||||||
|
obj["parent"] = parent.parent
|
||||||
|
|
||||||
|
obj = Comment(**obj)
|
||||||
|
_id = self.db.engine.execute(self.db.comments.insert()
|
||||||
|
.values((obj.id, obj.parent, obj.thread, obj.created, obj.modified,
|
||||||
|
obj.mode, obj.remote_addr, obj.text, obj.author, obj.email,
|
||||||
|
obj.website, obj.likes, obj.dislikes, buffer(obj.voters.array)))
|
||||||
|
).inserted_primary_key[0]
|
||||||
|
|
||||||
|
return obj.new(id=_id)
|
||||||
|
|
||||||
|
def edit(self, _id, new):
|
||||||
|
obj = self.get(_id)
|
||||||
|
if not obj:
|
||||||
|
return None
|
||||||
|
|
||||||
|
new.setdefault("text", "")
|
||||||
|
new.setdefault("parent", None)
|
||||||
|
for field in ("email", "author", "website"):
|
||||||
|
new.setdefault(field, None)
|
||||||
|
|
||||||
|
valid, reason = Validator.verify(new)
|
||||||
|
if not valid:
|
||||||
|
raise Invalid(reason)
|
||||||
|
|
||||||
|
obj = obj.new(text=new["text"], author=new["author"], email=new["email"],
|
||||||
|
website=new["website"], modified=time.time())
|
||||||
|
self.db.engine.execute(self.db.comments.update()
|
||||||
|
.values(text=obj.text, author=obj.author, email=obj.email,
|
||||||
|
website=obj.website, modified=obj.modified)
|
||||||
|
.where(self.db.comments.c.id == _id))
|
||||||
|
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def get(self, _id):
|
||||||
|
"""Retrieve comment with :param id: if any.
|
||||||
|
"""
|
||||||
|
rv = self.db.engine.execute(
|
||||||
|
self.db.comments.select(self.db.comments.c.id == _id)).fetchone()
|
||||||
|
|
||||||
|
if not rv:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return Controller.Comment(rv)
|
||||||
|
|
||||||
|
def all(self, thread, mode=1, after=0, parent='any', order_by='id', limit=None):
|
||||||
|
stmt = (self.db.comments.select()
|
||||||
|
.where(self.db.comments.c.thread == thread.id)
|
||||||
|
.where(self.db.comments.c.mode.op("|")(mode) == self.db.comments.c.mode)
|
||||||
|
.where(self.db.comments.c.created > after))
|
||||||
|
|
||||||
|
if parent != 'any':
|
||||||
|
stmt = stmt.where(self.db.comments.c.parent == parent)
|
||||||
|
|
||||||
|
stmt.order_by(getattr(self.db.comments.c, order_by))
|
||||||
|
|
||||||
|
if limit:
|
||||||
|
stmt.limit(limit)
|
||||||
|
|
||||||
|
return list(map(Controller.Comment, self.db.engine.execute(stmt).fetchall()))
|
||||||
|
|
||||||
|
def vote(self, remote_addr, _id, like):
|
||||||
|
"""Vote with +1 or -1 on comment :param id:
|
||||||
|
|
||||||
|
Returns True on success (in either direction), False if :param
|
||||||
|
remote_addr: has already voted. A comment can not be voted by its
|
||||||
|
original author.
|
||||||
|
"""
|
||||||
|
obj = self.get(_id)
|
||||||
|
if obj is None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if remote_addr in obj.voters:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if like:
|
||||||
|
obj = obj.new(likes=obj.likes + 1)
|
||||||
|
else:
|
||||||
|
obj = obj.new(dislikes=obj.dislikes + 1)
|
||||||
|
|
||||||
|
obj.voters.add(remote_addr)
|
||||||
|
self.db.engine.execute(self.db.comments.update()
|
||||||
|
.values(likes=obj.likes, dislikes=obj.dislikes,
|
||||||
|
voters=buffer(obj.voters.array))
|
||||||
|
.where(self.db.comments.c.id == _id))
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def like(self, remote_addr, _id):
|
||||||
|
return self.vote(remote_addr, _id, like=True)
|
||||||
|
|
||||||
|
def dislike(self, remote_addr, _id):
|
||||||
|
return self.vote(remote_addr, _id, like=False)
|
||||||
|
|
||||||
|
def delete(self, _id):
|
||||||
|
"""
|
||||||
|
Delete comment with :param id:
|
||||||
|
|
||||||
|
If the comment is referenced by another (not yet deleted) comment, the
|
||||||
|
comment is *not* removed, but cleared and flagged as deleted.
|
||||||
|
"""
|
||||||
|
refs = self.db.engine.execute(
|
||||||
|
self.db.comments.select(self.db.comments.c.id).where(
|
||||||
|
self.db.comments.c.parent == _id)).fetchone()
|
||||||
|
|
||||||
|
if refs is None:
|
||||||
|
self.db.engine.execute(
|
||||||
|
self.db.comments.delete(self.db.comments.c.id == _id))
|
||||||
|
self.db.engine.execute(
|
||||||
|
self.db.comments.delete()
|
||||||
|
.where(self.db.comments.c.mode.op("|")(4) == self.db.comments.c.mode)
|
||||||
|
.where(not_(self.db.comments.c.id.in_(
|
||||||
|
select([self.db.comments.c.parent]).where(
|
||||||
|
self.db.comments.c.parent != None)))))
|
||||||
|
return None
|
||||||
|
|
||||||
|
obj = self.get(_id)
|
||||||
|
obj = obj.new(text="", author=None, email=None, website=None, mode=4)
|
||||||
|
self.db.engine.execute(self.db.comments.update()
|
||||||
|
.values(text=obj.text, author=obj.email, website=obj.website, mode=obj.mode)
|
||||||
|
.where(self.db.comments.c.id == _id))
|
||||||
|
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def count(self, *threads):
|
||||||
|
"""Retrieve comment count for :param threads:
|
||||||
|
"""
|
||||||
|
|
||||||
|
ids = [getattr(th, "id", None) for th in threads]
|
||||||
|
|
||||||
|
threads = dict(
|
||||||
|
self.db.engine.execute(
|
||||||
|
select([self.db.comments.c.thread, func.count(self.db.comments)])
|
||||||
|
.where(self.db.comments.c.thread.in_(ids))
|
||||||
|
.group_by(self.db.comments.c.thread)).fetchall())
|
||||||
|
|
||||||
|
return [threads.get(_id, 0) for _id in ids]
|
||||||
|
|
||||||
|
def reply_count(self, thread, mode=5, after=0):
|
||||||
|
|
||||||
|
rv = self.db.engine.execute(
|
||||||
|
select([self.db.comments.c.parent, func.count(self.db.comments)])
|
||||||
|
.where(self.db.comments.c.thread == thread.id)
|
||||||
|
# .where(self.db.comments.c.mode.op("|")(mode) == mode)
|
||||||
|
.where(self.db.comments.c.created)
|
||||||
|
.group_by(self.db.comments.c.parent)).fetchall()
|
||||||
|
|
||||||
|
return dict(rv)
|
||||||
|
|
||||||
|
def activate(self, _id):
|
||||||
|
"""Activate comment :param id: and return True on success.
|
||||||
|
"""
|
||||||
|
obj = self.get(_id)
|
||||||
|
if obj is None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
i = self.db.engine.execute(self.db.comments.update()
|
||||||
|
.values(mode=1)
|
||||||
|
.where(self.db.comments.c.id == _id)
|
||||||
|
.where(self.db.comments.c.mode == 2)).rowcount
|
||||||
|
|
||||||
|
return i > 0
|
||||||
|
|
||||||
|
def prune(self, delta):
|
||||||
|
"""Remove comments still in moderation queue older than max-age.
|
||||||
|
"""
|
||||||
|
now = time.time()
|
||||||
|
|
||||||
|
self.db.engine.execute(
|
||||||
|
self.db.comments.delete()
|
||||||
|
.where(self.db.comments.c.mode == 2)
|
||||||
|
.where(now - self.db.comments.c.created > delta))
|
||||||
|
|
||||||
|
return
|

isso/controllers/threads.py  (new file, 36 lines)

@@ -0,0 +1,36 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

from isso.models import Thread


class Controller(object):

    def __init__(self, db):
        self.db = db

    def new(self, uri, title=None):
        _id = self.db.engine.execute(
            self.db.threads.insert().values(uri=uri, title=title)
        ).inserted_primary_key[0]

        return Thread(_id, uri, title)

    def get(self, uri):
        rv = self.db.engine.execute(
            self.db.threads.select(self.db.threads.c.uri == uri)).fetchone()

        if not rv:
            return None

        return Thread(*rv)

    def delete(self, uri):
        thread = self.get(uri)

        self.db.engine.execute(
            self.db.comments.delete().where(self.db.comments.c.thread == thread.id))

        self.db.engine.execute(
            self.db.threads.delete().where(self.db.threads.c.id == thread.id))
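
A usage sketch for the controller above; it assumes the SQLAlchemy-based db.Adapter added elsewhere in this branch with an in-memory database, and the URI and title are made up for illustration:

    from isso import db
    from isso.controllers import threads

    dbobj = db.Adapter("sqlite:///:memory:")
    ctrl = threads.Controller(dbobj)

    thread = ctrl.new("/hello-world/", title="Hello, World")
    assert ctrl.get("/hello-world/") is not None
    ctrl.delete("/hello-world/")
    assert ctrl.get("/hello-world/") is None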
130
isso/core.py
130
isso/core.py
@ -1,130 +0,0 @@
|
|||||||
# -*- encoding: utf-8 -*-
|
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import time
|
|
||||||
import logging
|
|
||||||
import threading
|
|
||||||
import multiprocessing
|
|
||||||
|
|
||||||
try:
|
|
||||||
import uwsgi
|
|
||||||
except ImportError:
|
|
||||||
uwsgi = None
|
|
||||||
|
|
||||||
from isso.compat import PY2K
|
|
||||||
|
|
||||||
if PY2K:
|
|
||||||
import thread
|
|
||||||
else:
|
|
||||||
import _thread as thread
|
|
||||||
|
|
||||||
from werkzeug.contrib.cache import NullCache, SimpleCache
|
|
||||||
|
|
||||||
logger = logging.getLogger("isso")
|
|
||||||
|
|
||||||
|
|
||||||
class Cache:
|
|
||||||
"""Wrapper around werkzeug's cache class, to make it compatible to
|
|
||||||
uWSGI's cache framework.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, cache):
|
|
||||||
self.cache = cache
|
|
||||||
|
|
||||||
def get(self, cache, key):
|
|
||||||
return self.cache.get(key)
|
|
||||||
|
|
||||||
def set(self, cache, key, value):
|
|
||||||
return self.cache.set(key, value)
|
|
||||||
|
|
||||||
def delete(self, cache, key):
|
|
||||||
return self.cache.delete(key)
|
|
||||||
|
|
||||||
|
|
||||||
class Mixin(object):
|
|
||||||
|
|
||||||
def __init__(self, conf):
|
|
||||||
self.lock = threading.Lock()
|
|
||||||
self.cache = Cache(NullCache())
|
|
||||||
|
|
||||||
def notify(self, subject, body, retries=5):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def threaded(func):
|
|
||||||
"""
|
|
||||||
Decorator to execute each :param func: call in a separate thread.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def dec(self, *args, **kwargs):
|
|
||||||
thread.start_new_thread(func, (self, ) + args, kwargs)
|
|
||||||
|
|
||||||
return dec
|
|
||||||
|
|
||||||
|
|
||||||
class ThreadedMixin(Mixin):
|
|
||||||
|
|
||||||
def __init__(self, conf):
|
|
||||||
|
|
||||||
super(ThreadedMixin, self).__init__(conf)
|
|
||||||
|
|
||||||
if conf.getboolean("moderation", "enabled"):
|
|
||||||
self.purge(conf.getint("moderation", "purge-after"))
|
|
||||||
|
|
||||||
self.cache = Cache(SimpleCache(threshold=1024, default_timeout=3600))
|
|
||||||
|
|
||||||
@threaded
|
|
||||||
def purge(self, delta):
|
|
||||||
while True:
|
|
||||||
with self.lock:
|
|
||||||
self.db.comments.purge(delta)
|
|
||||||
time.sleep(delta)
|
|
||||||
|
|
||||||
|
|
||||||
class ProcessMixin(ThreadedMixin):
|
|
||||||
|
|
||||||
def __init__(self, conf):
|
|
||||||
|
|
||||||
super(ProcessMixin, self).__init__(conf)
|
|
||||||
self.lock = multiprocessing.Lock()
|
|
||||||
|
|
||||||
|
|
||||||
class uWSGICache(object):
|
|
||||||
"""Uses uWSGI Caching Framework. INI configuration:
|
|
||||||
|
|
||||||
.. code-block:: ini
|
|
||||||
|
|
||||||
cache2 = name=hash,items=1024,blocksize=32
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get(self, cache, key):
|
|
||||||
return uwsgi.cache_get(key, cache)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def set(self, cache, key, value):
|
|
||||||
uwsgi.cache_set(key, value, 3600, cache)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def delete(self, cache, key):
|
|
||||||
uwsgi.cache_del(key, cache)
|
|
||||||
|
|
||||||
|
|
||||||
class uWSGIMixin(Mixin):
|
|
||||||
|
|
||||||
def __init__(self, conf):
|
|
||||||
|
|
||||||
super(uWSGIMixin, self).__init__(conf)
|
|
||||||
|
|
||||||
self.lock = multiprocessing.Lock()
|
|
||||||
self.cache = uWSGICache
|
|
||||||
|
|
||||||
timedelta = conf.getint("moderation", "purge-after")
|
|
||||||
purge = lambda signum: self.db.comments.purge(timedelta)
|
|
||||||
uwsgi.register_signal(1, "", purge)
|
|
||||||
uwsgi.add_timer(1, timedelta)
|
|
||||||
|
|
||||||
# run purge once
|
|
||||||
purge(1)
|
|
@ -1,21 +1,164 @@
|
|||||||
# -*- encoding: utf-8 -*-
|
# -*- encoding: utf-8 -*-
|
||||||
|
|
||||||
import sqlite3
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
import sqlite3
|
||||||
|
import binascii
|
||||||
import operator
|
import operator
|
||||||
|
import threading
|
||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
|
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from sqlalchemy import Table, Column, MetaData, create_engine
|
||||||
|
from sqlalchemy import ForeignKey, Integer, Float, String, LargeBinary
|
||||||
|
from sqlalchemy.sql import select
|
||||||
|
|
||||||
logger = logging.getLogger("isso")
|
logger = logging.getLogger("isso")
|
||||||
|
|
||||||
from isso.db.comments import Comments
|
|
||||||
from isso.db.threads import Threads
|
class Adapter(object):
|
||||||
from isso.db.spam import Guard
|
|
||||||
from isso.db.preferences import Preferences
|
def __init__(self, db):
|
||||||
|
self.engine = create_engine(db, echo=False)
|
||||||
|
self.metadata = MetaData()
|
||||||
|
|
||||||
|
self.comments = Table("comments", self.metadata,
|
||||||
|
Column("id", Integer, primary_key=True),
|
||||||
|
Column("parent", Integer),
|
||||||
|
Column("thread", None, ForeignKey("threads.id")),
|
||||||
|
Column("created", Float),
|
||||||
|
Column("modified", Float),
|
||||||
|
Column("mode", Integer),
|
||||||
|
Column("remote_addr", String(48)), # XXX use a BigInt
|
||||||
|
Column("text", String(65535)),
|
||||||
|
Column("author", String(255)),
|
||||||
|
Column("email", String(255)),
|
||||||
|
Column("website", String(255)),
|
||||||
|
Column("likes", Integer),
|
||||||
|
Column("dislikes", Integer),
|
||||||
|
Column("voters", LargeBinary(256)))
|
||||||
|
|
||||||
|
self.threads = Table("threads", self.metadata,
|
||||||
|
Column("id", Integer, primary_key=True),
|
||||||
|
Column("uri", String(255), unique=True),
|
||||||
|
Column("title", String(255)))
|
||||||
|
|
||||||
|
preferences = Table("preferences", self.metadata,
|
||||||
|
Column("key", String(255), primary_key=True),
|
||||||
|
Column("value", String(255)))
|
||||||
|
|
||||||
|
self.metadata.create_all(self.engine)
|
||||||
|
self.preferences = Preferences(self.engine, preferences)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def transaction(self):
|
||||||
|
return self.engine.begin()
|
||||||
|
|
||||||
|
|
||||||
class SQLite3:
|
class Preferences(object):
|
||||||
|
"""A simple key-value store using SQL.
|
||||||
|
"""
|
||||||
|
|
||||||
|
defaults = [
|
||||||
|
("session-key", binascii.b2a_hex(os.urandom(24))),
|
||||||
|
]
|
||||||
|
|
||||||
|
def __init__(self, engine, preferences):
|
||||||
|
self.engine = engine
|
||||||
|
self.preferences = preferences
|
||||||
|
|
||||||
|
for (key, value) in Preferences.defaults:
|
||||||
|
if self.get(key) is None:
|
||||||
|
self.set(key, value)
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
rv = self.engine.execute(
|
||||||
|
select([self.preferences.c.value])
|
||||||
|
.where(self.preferences.c.key == key)).fetchone()
|
||||||
|
|
||||||
|
if rv is None:
|
||||||
|
return default
|
||||||
|
|
||||||
|
return rv[0]
|
||||||
|
|
||||||
|
def set(self, key, value):
|
||||||
|
self.engine.execute(
|
||||||
|
self.preferences.insert().values(
|
||||||
|
key=key, value=value))
|
||||||
|
|
||||||
|
|
||||||
|
class Transaction(object):
|
||||||
|
"""A context manager to lock the database across processes and automatic
|
||||||
|
rollback on failure. On success, reset the isolation level back to normal.
|
||||||
|
|
||||||
|
SQLite3's DEFERRED (default) transaction mode causes database corruption
|
||||||
|
for concurrent writes to the database from multiple processes. IMMEDIATE
|
||||||
|
ensures a global write lock, but reading is still possible.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, con):
|
||||||
|
self.con = con
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
self._orig = self.con.isolation_level
|
||||||
|
self.con.isolation_level = "IMMEDIATE"
|
||||||
|
self.con.execute("BEGIN IMMEDIATE")
|
||||||
|
return self.con
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
try:
|
||||||
|
if exc_type:
|
||||||
|
self.con.rollback()
|
||||||
|
else:
|
||||||
|
self.con.commit()
|
||||||
|
finally:
|
||||||
|
self.con.isolation_level = self._orig
|
||||||
|
|
||||||
|
|
||||||
|
class SQLite3(object):
|
||||||
|
"""SQLite3 connection pool across multiple threads. Implementation idea
|
||||||
|
from `Peewee <https://github.com/coleifer/peewee>`_.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, db):
|
||||||
|
self.db = os.path.expanduser(db)
|
||||||
|
self.lock = threading.Lock()
|
||||||
|
self.local = threading.local()
|
||||||
|
|
||||||
|
def connect(self):
|
||||||
|
with self.lock:
|
||||||
|
self.local.conn = sqlite3.connect(self.db, isolation_level=None)
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
with self.lock:
|
||||||
|
self.local.conn.close()
|
||||||
|
self.local.conn = None
|
||||||
|
|
||||||
|
def execute(self, sql, args=()):
|
||||||
|
if isinstance(sql, (list, tuple)):
|
||||||
|
sql = ' '.join(sql)
|
||||||
|
|
||||||
|
return self.connection.execute(sql, args)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def connection(self):
|
||||||
|
if not hasattr(self.local, 'conn') or self.local.conn is None:
|
||||||
|
self.connect()
|
||||||
|
return self.local.conn
|
||||||
|
|
||||||
|
@property
|
||||||
|
def transaction(self):
|
||||||
|
return Transaction(self.connection)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def total_changes(self):
|
||||||
|
return self.connection.total_changes
|
||||||
|
|
||||||
|
|
||||||
|
class Foo(object):
|
||||||
"""DB-dependend wrapper around SQLite3.
|
"""DB-dependend wrapper around SQLite3.
|
||||||
|
|
||||||
Runs migration if `user_version` is older than `MAX_VERSION` and register
|
Runs migration if `user_version` is older than `MAX_VERSION` and register
|
||||||
@ -24,9 +167,8 @@ class SQLite3:
|
|||||||
|
|
||||||
MAX_VERSION = 3
|
MAX_VERSION = 3
|
||||||
|
|
||||||
def __init__(self, path, conf):
|
def __init__(self, conn, conf):
|
||||||
|
self.connection = conn
|
||||||
self.path = os.path.expanduser(path)
|
|
||||||
self.conf = conf
|
self.conf = conf
|
||||||
|
|
||||||
rv = self.execute([
|
rv = self.execute([
|
||||||
@ -40,9 +182,9 @@ class SQLite3:
|
|||||||
self.guard = Guard(self)
|
self.guard = Guard(self)
|
||||||
|
|
||||||
if rv is None:
|
if rv is None:
|
||||||
self.execute("PRAGMA user_version = %i" % SQLite3.MAX_VERSION)
|
self.execute("PRAGMA user_version = %i" % Adapter.MAX_VERSION)
|
||||||
else:
|
else:
|
||||||
self.migrate(to=SQLite3.MAX_VERSION)
|
self.migrate(to=Adapter.MAX_VERSION)
|
||||||
|
|
||||||
self.execute([
|
self.execute([
|
||||||
'CREATE TRIGGER IF NOT EXISTS remove_stale_threads',
|
'CREATE TRIGGER IF NOT EXISTS remove_stale_threads',
|
||||||
@ -51,14 +193,6 @@ class SQLite3:
|
|||||||
' DELETE FROM threads WHERE id NOT IN (SELECT tid FROM comments);',
|
' DELETE FROM threads WHERE id NOT IN (SELECT tid FROM comments);',
|
||||||
'END'])
|
'END'])
|
||||||
|
|
||||||
def execute(self, sql, args=()):
|
|
||||||
|
|
||||||
if isinstance(sql, (list, tuple)):
|
|
||||||
sql = ' '.join(sql)
|
|
||||||
|
|
||||||
with sqlite3.connect(self.path) as con:
|
|
||||||
return con.execute(sql, args)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def version(self):
|
def version(self):
|
||||||
return self.execute("PRAGMA user_version").fetchone()[0]
|
return self.execute("PRAGMA user_version").fetchone()[0]
|
||||||
@ -77,7 +211,7 @@ class SQLite3:
|
|||||||
from isso.utils import Bloomfilter
|
from isso.utils import Bloomfilter
|
||||||
bf = buffer(Bloomfilter(iterable=["127.0.0.0"]).array)
|
bf = buffer(Bloomfilter(iterable=["127.0.0.0"]).array)
|
||||||
|
|
||||||
with sqlite3.connect(self.path) as con:
|
with self.connection.transaction as con:
|
||||||
con.execute('UPDATE comments SET voters=?', (bf, ))
|
con.execute('UPDATE comments SET voters=?', (bf, ))
|
||||||
con.execute('PRAGMA user_version = 1')
|
con.execute('PRAGMA user_version = 1')
|
||||||
logger.info("%i rows changed", con.total_changes)
|
logger.info("%i rows changed", con.total_changes)
|
||||||
@ -85,7 +219,7 @@ class SQLite3:
|
|||||||
# move [general] session-key to database
|
# move [general] session-key to database
|
||||||
if self.version == 1:
|
if self.version == 1:
|
||||||
|
|
||||||
with sqlite3.connect(self.path) as con:
|
with self.connection.transaction as con:
|
||||||
if self.conf.has_option("general", "session-key"):
|
if self.conf.has_option("general", "session-key"):
|
||||||
con.execute('UPDATE preferences SET value=? WHERE key=?', (
|
con.execute('UPDATE preferences SET value=? WHERE key=?', (
|
||||||
self.conf.get("general", "session-key"), "session-key"))
|
self.conf.get("general", "session-key"), "session-key"))
|
||||||
@ -98,7 +232,7 @@ class SQLite3:
|
|||||||
|
|
||||||
first = lambda rv: list(map(operator.itemgetter(0), rv))
|
first = lambda rv: list(map(operator.itemgetter(0), rv))
|
||||||
|
|
||||||
with sqlite3.connect(self.path) as con:
|
with self.connection.transaction as con:
|
||||||
top = first(con.execute("SELECT id FROM comments WHERE parent IS NULL").fetchall())
|
top = first(con.execute("SELECT id FROM comments WHERE parent IS NULL").fetchall())
|
||||||
flattened = defaultdict(set)
|
flattened = defaultdict(set)
|
||||||
|
|
||||||
@ -117,3 +251,6 @@ class SQLite3:
|
|||||||
|
|
||||||
con.execute('PRAGMA user_version = 3')
|
con.execute('PRAGMA user_version = 3')
|
||||||
logger.info("%i rows changed", con.total_changes)
|
logger.info("%i rows changed", con.total_changes)
|
||||||
|
|
||||||
|
def execute(self, sql, args=()):
|
||||||
|
return self.connection.execute(sql, args)
|
||||||
|
@ -1,234 +0,0 @@
|
|||||||
# -*- encoding: utf-8 -*-
|
|
||||||
|
|
||||||
import time
|
|
||||||
|
|
||||||
from isso.utils import Bloomfilter
|
|
||||||
from isso.compat import buffer
|
|
||||||
|
|
||||||
|
|
||||||
class Comments:
|
|
||||||
"""Hopefully DB-independend SQL to store, modify and retrieve all
|
|
||||||
comment-related actions. Here's a short scheme overview:
|
|
||||||
|
|
||||||
| tid (thread id) | id (comment id) | parent | ... | voters | remote_addr |
|
|
||||||
+-----------------+-----------------+--------+-----+--------+-------------+
|
|
||||||
| 1 | 1 | null | ... | BLOB | 127.0.0.0 |
|
|
||||||
| 1 | 2 | 1 | ... | BLOB | 127.0.0.0 |
|
|
||||||
+-----------------+-----------------+--------+-----+--------+-------------+
|
|
||||||
|
|
||||||
The tuple (tid, id) is unique and thus primary key.
|
|
||||||
"""
|
|
||||||
|
|
||||||
fields = ['tid', 'id', 'parent', 'created', 'modified', 'mode', 'remote_addr',
|
|
||||||
'text', 'author', 'email', 'website', 'likes', 'dislikes', 'voters']
|
|
||||||
|
|
||||||
def __init__(self, db):
|
|
||||||
|
|
||||||
self.db = db
|
|
||||||
self.db.execute([
|
|
||||||
'CREATE TABLE IF NOT EXISTS comments (',
|
|
||||||
' tid REFERENCES threads(id), id INTEGER PRIMARY KEY, parent INTEGER,',
|
|
||||||
' created FLOAT NOT NULL, modified FLOAT, mode INTEGER, remote_addr VARCHAR,',
|
|
||||||
' text VARCHAR, author VARCHAR, email VARCHAR, website VARCHAR,',
|
|
||||||
' likes INTEGER DEFAULT 0, dislikes INTEGER DEFAULT 0, voters BLOB NOT NULL);'])
|
|
||||||
|
|
||||||
def add(self, uri, c):
|
|
||||||
"""
|
|
||||||
Add new comment to DB and return a mapping of :attribute:`fields` and
|
|
||||||
database values.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if c.get("parent") is not None:
|
|
||||||
ref = self.get(c["parent"])
|
|
||||||
if ref.get("parent") is not None:
|
|
||||||
c["parent"] = ref["parent"]
|
|
||||||
|
|
||||||
self.db.execute([
|
|
||||||
'INSERT INTO comments (',
|
|
||||||
' tid, parent,'
|
|
||||||
' created, modified, mode, remote_addr,',
|
|
||||||
' text, author, email, website, voters )',
|
|
||||||
'SELECT',
|
|
||||||
' threads.id, ?,',
|
|
||||||
' ?, ?, ?, ?,',
|
|
||||||
' ?, ?, ?, ?, ?',
|
|
||||||
'FROM threads WHERE threads.uri = ?;'], (
|
|
||||||
c.get('parent'),
|
|
||||||
c.get('created') or time.time(), None, c["mode"], c['remote_addr'],
|
|
||||||
c['text'], c.get('author'), c.get('email'), c.get('website'), buffer(
|
|
||||||
Bloomfilter(iterable=[c['remote_addr']]).array),
|
|
||||||
uri)
|
|
||||||
)
|
|
||||||
|
|
||||||
return dict(zip(Comments.fields, self.db.execute(
|
|
||||||
'SELECT *, MAX(c.id) FROM comments AS c INNER JOIN threads ON threads.uri = ?',
|
|
||||||
(uri, )).fetchone()))
|
|
||||||
|
|
||||||
def activate(self, id):
|
|
||||||
"""
|
|
||||||
Activate comment id if pending.
|
|
||||||
"""
|
|
||||||
self.db.execute([
|
|
||||||
'UPDATE comments SET',
|
|
||||||
' mode=1',
|
|
||||||
'WHERE id=? AND mode=2'], (id, ))
|
|
||||||
|
|
||||||
def update(self, id, data):
|
|
||||||
"""
|
|
||||||
Update comment :param:`id` with values from :param:`data` and return
|
|
||||||
updated comment.
|
|
||||||
"""
|
|
||||||
self.db.execute([
|
|
||||||
'UPDATE comments SET',
|
|
||||||
','.join(key + '=' + '?' for key in data),
|
|
||||||
'WHERE id=?;'],
|
|
||||||
list(data.values()) + [id])
|
|
||||||
|
|
||||||
return self.get(id)
|
|
||||||
|
|
||||||
def get(self, id):
|
|
||||||
"""
|
|
||||||
Search for comment :param:`id` and return a mapping of :attr:`fields`
|
|
||||||
and values.
|
|
||||||
"""
|
|
||||||
rv = self.db.execute('SELECT * FROM comments WHERE id=?', (id, )).fetchone()
|
|
||||||
if rv:
|
|
||||||
return dict(zip(Comments.fields, rv))
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
def fetch(self, uri, mode=5, after=0, parent='any', order_by='id', limit=None):
|
|
||||||
"""
|
|
||||||
Return comments for :param:`uri` with :param:`mode`.
|
|
||||||
"""
|
|
||||||
sql = [ 'SELECT comments.* FROM comments INNER JOIN threads ON',
|
|
||||||
' threads.uri=? AND comments.tid=threads.id AND (? | comments.mode) = ?',
|
|
||||||
' AND comments.created>?']
|
|
||||||
|
|
||||||
sql_args = [uri, mode, mode, after]
|
|
||||||
|
|
||||||
if parent != 'any':
|
|
||||||
if parent is None:
|
|
||||||
sql.append('AND comments.parent IS NULL')
|
|
||||||
else:
|
|
||||||
sql.append('AND comments.parent=?')
|
|
||||||
sql_args.append(parent)
|
|
||||||
|
|
||||||
sql.append('ORDER BY ? ASC')
|
|
||||||
sql_args.append(order_by)
|
|
||||||
|
|
||||||
if limit:
|
|
||||||
sql.append('LIMIT ?')
|
|
||||||
sql_args.append(limit)
|
|
||||||
|
|
||||||
rv = self.db.execute(sql, sql_args).fetchall()
|
|
||||||
for item in rv:
|
|
||||||
yield dict(zip(Comments.fields, item))
|
|
||||||
|
|
||||||
def _remove_stale(self):
|
|
||||||
|
|
||||||
sql = ('DELETE FROM',
|
|
||||||
' comments',
|
|
||||||
'WHERE',
|
|
||||||
' mode=4 AND id NOT IN (',
|
|
||||||
' SELECT',
|
|
||||||
' parent',
|
|
||||||
' FROM',
|
|
||||||
' comments',
|
|
||||||
' WHERE parent IS NOT NULL)')
|
|
||||||
|
|
||||||
while self.db.execute(sql).rowcount:
|
|
            continue

    def delete(self, id):
        """
        Delete a comment. There are two distinctions: a comment is referenced
        by another valid comment's parent attribute or stand-a-lone. In this
        case the comment can't be removed without losing depending comments.
        Hence, delete removes all visible data such as text, author, email,
        website sets the mode field to 4.

        In the second case this comment can be safely removed without any side
        effects."""

        refs = self.db.execute('SELECT * FROM comments WHERE parent=?', (id, )).fetchone()

        if refs is None:
            self.db.execute('DELETE FROM comments WHERE id=?', (id, ))
            self._remove_stale()
            return None

        self.db.execute('UPDATE comments SET text=? WHERE id=?', ('', id))
        self.db.execute('UPDATE comments SET mode=? WHERE id=?', (4, id))
        for field in ('author', 'website'):
            self.db.execute('UPDATE comments SET %s=? WHERE id=?' % field, (None, id))

        self._remove_stale()
        return self.get(id)

    def vote(self, upvote, id, remote_addr):
        """+1 a given comment. Returns the new like count (may not change because
        the creater can't vote on his/her own comment and multiple votes from the
        same ip address are ignored as well)."""

        rv = self.db.execute(
            'SELECT likes, dislikes, voters FROM comments WHERE id=?', (id, )) \
            .fetchone()

        if rv is None:
            return None

        likes, dislikes, voters = rv
        if likes + dislikes >= 142:
            return {'likes': likes, 'dislikes': dislikes}

        bf = Bloomfilter(bytearray(voters), likes + dislikes)
        if remote_addr in bf:
            return {'likes': likes, 'dislikes': dislikes}

        bf.add(remote_addr)
        self.db.execute([
            'UPDATE comments SET',
            ' likes = likes + 1,' if upvote else 'dislikes = dislikes + 1,',
            ' voters = ?'
            'WHERE id=?;'], (buffer(bf.array), id))

        if upvote:
            return {'likes': likes + 1, 'dislikes': dislikes}
        return {'likes': likes, 'dislikes': dislikes + 1}

    def reply_count(self, url, mode=5, after=0):
        """
        Return comment count for main thread and all reply threads for one url.
        """

        sql = ['SELECT comments.parent,count(*)',
               'FROM comments INNER JOIN threads ON',
               ' threads.uri=? AND comments.tid=threads.id AND',
               ' (? | comments.mode = ?) AND',
               ' comments.created > ?',
               'GROUP BY comments.parent']

        return dict(self.db.execute(sql, [url, mode, mode, after]).fetchall())

    def count(self, *urls):
        """
        Return comment count for one ore more urls..
        """

        threads = dict(self.db.execute([
            'SELECT threads.uri, COUNT(comments.id) FROM comments',
            'LEFT OUTER JOIN threads ON threads.id = tid AND comments.mode = 1',
            'GROUP BY threads.uri'
        ]).fetchall())

        return [threads.get(url, 0) for url in urls]

    def purge(self, delta):
        """
        Remove comments older than :param:`delta`.
        """
        self.db.execute([
            'DELETE FROM comments WHERE mode = 2 AND ? - created > ?;'
        ], (time.time(), delta))
        self._remove_stale()
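
Note: the `vote` method above keeps the set of voters in a small Bloom filter serialized into the `voters` blob, so a repeat vote from the same address is dropped without storing a plain list of IPs. A minimal, self-contained sketch of that idea follows; this `TinyBloom` class is illustrative only and is not isso's actual `Bloomfilter`.

import hashlib

class TinyBloom(object):
    """Hypothetical fixed-size Bloom filter over remote addresses."""

    def __init__(self, array=None):
        self.array = array if array is not None else bytearray(256)

    def _bits(self, value):
        # derive a few bit positions from a single SHA-256 digest
        digest = bytearray(hashlib.sha256(value.encode("utf-8")).digest())
        size = len(self.array) * 8
        return [((digest[i] << 8) | digest[i + 1]) % size for i in (0, 2, 4, 6)]

    def add(self, value):
        for bit in self._bits(value):
            self.array[bit // 8] |= 1 << (bit % 8)

    def __contains__(self, value):
        return all(self.array[bit // 8] & (1 << (bit % 8)) for bit in self._bits(value))

bf = TinyBloom()
bf.add("127.0.0.1")
assert "127.0.0.1" in bf      # the same address is always recognized
assert "10.0.0.1" not in bf   # other addresses only collide with low probability
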

@@ -1,36 +0,0 @@
# -*- encoding: utf-8 -*-

import os
import binascii


class Preferences:

    defaults = [
        ("session-key", binascii.b2a_hex(os.urandom(24))),
    ]

    def __init__(self, db):

        self.db = db
        self.db.execute([
            'CREATE TABLE IF NOT EXISTS preferences (',
            ' key VARCHAR PRIMARY KEY, value VARCHAR',
            ');'])

        for (key, value) in Preferences.defaults:
            if self.get(key) is None:
                self.set(key, value)

    def get(self, key, default=None):
        rv = self.db.execute(
            'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()

        if rv is None:
            return default

        return rv[0]

    def set(self, key, value):
        self.db.execute(
            'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))

@@ -1,67 +0,0 @@
# -*- encoding: utf-8 -*-

import time


class Guard:

    def __init__(self, db):

        self.db = db
        self.conf = db.conf.section("guard")
        self.max_age = db.conf.getint("general", "max-age")

    def validate(self, uri, comment):

        if not self.conf.getboolean("enabled"):
            return True, ""

        for func in (self._limit, self._spam):
            valid, reason = func(uri, comment)
            if not valid:
                return False, reason
        return True, ""

    @classmethod
    def ids(cls, rv):
        return [str(col[0]) for col in rv]

    def _limit(self, uri, comment):

        # block more than :param:`ratelimit` comments per minute
        rv = self.db.execute([
            'SELECT id FROM comments WHERE remote_addr = ? AND ? - created < 60;'
        ], (comment["remote_addr"], time.time())).fetchall()

        if len(rv) >= self.conf.getint("ratelimit"):
            return False, "{0}: ratelimit exceeded ({1})".format(
                comment["remote_addr"], ', '.join(Guard.ids(rv)))

        # block more than three comments as direct response to the post
        if comment["parent"] is None:
            rv = self.db.execute([
                'SELECT id FROM comments WHERE',
                ' tid = (SELECT id FROM threads WHERE uri = ?)',
                'AND remote_addr = ?',
                'AND parent IS NULL;'
            ], (uri, comment["remote_addr"])).fetchall()

            if len(rv) >= self.conf.getint("direct-reply"):
                return False, "%i direct responses to %s" % (len(rv), uri)

        elif self.conf.getboolean("reply-to-self") == False:
            rv = self.db.execute([
                'SELECT id FROM comments WHERE'
                ' remote_addr = ?',
                'AND id = ?',
                'AND ? - created < ?'
            ], (comment["remote_addr"], comment["parent"],
                time.time(), self.max_age)).fetchall()

            if len(rv) > 0:
                return False, "edit time frame is still open"

        return True, ""

    def _spam(self, uri, comment):
        return True, ""

@@ -1,30 +0,0 @@
# -*- encoding: utf-8 -*-


def Thread(id, uri, title):
    return {
        "id": id,
        "uri": uri,
        "title": title
    }


class Threads(object):

    def __init__(self, db):

        self.db = db
        self.db.execute([
            'CREATE TABLE IF NOT EXISTS threads (',
            ' id INTEGER PRIMARY KEY, uri VARCHAR(256) UNIQUE, title VARCHAR(256))'])

    def __contains__(self, uri):
        return self.db.execute("SELECT title FROM threads WHERE uri=?", (uri, )) \
            .fetchone() is not None

    def __getitem__(self, uri):
        return Thread(*self.db.execute("SELECT * FROM threads WHERE uri=?", (uri, )).fetchone())

    def new(self, uri, title):
        self.db.execute("INSERT INTO threads (uri, title) VALUES (?, ?)", (uri, title))
        return self[uri]

isso/defaults.ini (new symbolic link, 1 line)
@@ -0,0 +1 @@
../share/isso.conf

@@ -22,16 +17,17 @@ class Dispatcher(DispatcherMiddleware):
     a relative URI, e.g. /foo.example and /other.bar.
     """

-    default = os.path.join(dist.location, "share", "isso.conf")
+    conf = os.path.join(dist.location, "isso", "defaults.ini")

     def __init__(self, *confs):

         self.isso = {}

-        for i, conf in enumerate(map(config.load(Dispatcher.default, conf))):
+        for path in confs:
+            conf = config.load(Dispatcher.conf, path)

             if not conf.get("general", "name"):
-                logger.warn("unable to dispatch %r, no 'name' set", confs[i])
+                logger.warn("unable to dispatch %r, no 'name' set", path)
                 continue

             self.isso["/" + conf.get("general", "name")] = make_app(conf)
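
Note: `config.load(Dispatcher.conf, path)` layers each site-specific configuration over the bundled `isso/defaults.ini`. A rough stand-in using only the standard library is sketched below; the override path is made up, and isso's own `config.load` adds more behaviour than this.

try:
    from configparser import ConfigParser   # Python 3
except ImportError:
    from ConfigParser import ConfigParser   # Python 2

def load(defaults, *overrides):
    # values read later win, so the shipped defaults come first
    parser = ConfigParser()
    parser.read([defaults] + list(overrides))
    return parser

conf = load("isso/defaults.ini", "/etc/isso.d/foo.example.cfg")  # hypothetical paths
print(conf.sections())
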

@@ -8,6 +8,8 @@ define(function() {
         "max-comments-top": "inf",
         "max-comments-nested": 5,
         "reveal-on-click": 5,
+        "auth": false,
+        "auth-sign-in-url": "",
         "avatar": true,
         "avatar-bg": "#f0f0f0",
         "avatar-fg": ["#9abf88", "#5698c4", "#e279a3", "#9163b6",

@@ -3,7 +3,10 @@ define({
     "postbox-author": "Name (optional)",
     "postbox-email": "E-mail (optional)",
     "postbox-website": "Website (optional)",
-    "postbox-submit": "Submit",
+    "postbox-submit": "submit",
+    "postbox-auth-required": "Authentication required to comment.",
+    "postbox-auth-sign-in": "Sign-in",
+    "postbox-auth-commenting-as": "Commenting as",

     "num-comments": "One Comment\n{{ n }} Comments",
     "no-comments": "No Comments Yet",

@@ -4,8 +4,13 @@ define({
     "postbox-email": "Courriel (optionnel)",
     "postbox-website": "Site web (optionnel)",
     "postbox-submit": "Soumettre",
+    "postbox-auth-required": "Connexion nécessaire pour commenter.",
+    "postbox-auth-sign-in": "Se connecter",
+    "postbox-auth-commenting-as": "Commenter en tant que",

     "num-comments": "{{ n }} commentaire\n{{ n }} commentaires",
     "no-comments": "Aucun commentaire pour l'instant",

     "comment-reply": "Répondre",
     "comment-edit": "Éditer",
     "comment-save": "Enregistrer",
@@ -17,6 +22,7 @@ define({
     "comment-queued": "Commentaire en attente de modération.",
     "comment-anonymous": "Anonyme",
     "comment-hidden": "1 caché\n{{ n }} cachés",

     "date-now": "À l'instant'",
     "date-minute": "Il y a une minute\nIl y a {{ n }} minutes",
     "date-hour": "Il y a une heure\nIl y a {{ n }} heures ",

@@ -7,6 +7,20 @@ define(["app/dom", "app/utils", "app/config", "app/api", "app/jade", "app/i18n",

     var Postbox = function(parent) {

+        if (config['auth'] === true) {
+            var authCookie = utils.cookie('auth');
+            if (! authCookie) {
+                jade.set("cookie", {'valid': false});
+            } else {
+                var authData = lib.itsdangerous(authCookie)
+                if (! authData) {
+                    jade.set("cookie", {'valid': false});
+                } else {
+                    jade.set("cookie", {'valid': true, 'data': authData});
+                }
+            }
+        }
+
         var el = $.htmlify(jade.render("postbox"));

         // callback on success (e.g. to toggle the reply button)
@@ -24,7 +38,11 @@ define(["app/dom", "app/utils", "app/config", "app/api", "app/jade", "app/i18n",

         // submit form, initialize optional fields with `null` and reset form.
         // If replied to a comment, remove form completely.
-        $("[type=submit]", el).on("click", function() {
+        var submit = $("[type=submit]", el)
+        if (submit === null) {
+            return el;
+        }
+        submit.on("click", function() {
             if (! el.validate()) {
                 return;
             }
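
Note: the postbox only reads the `auth` cookie; some sign-in endpoint on the server has to set it. A hedged sketch of what that could look like with Werkzeug and itsdangerous follows; the endpoint, secret key and user data are made up.

from itsdangerous import URLSafeTimedSerializer
from werkzeug.wrappers import Response

def sign_in(environ, start_response):
    # sign whatever identifies the user; key and payload are placeholders
    serializer = URLSafeTimedSerializer("not-the-real-session-key")
    token = serializer.dumps({"username": "jane"})

    response = Response("signed in", 200)
    # this is the cookie the postbox reads via utils.cookie('auth')
    response.set_cookie("auth", token, max_age=15 * 60)
    return response(environ, start_response)
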

@@ -1,6 +1,7 @@
 define(function (require) {
     return {
         editorify: require("app/lib/editor"),
-        identicons: require("app/lib/identicons")
+        identicons: require("app/lib/identicons"),
+        itsdangerous: require("app/lib/itsdangerous")
     };
 });

isso/js/app/lib/itsdangerous.js (new file, 47 lines)
@@ -0,0 +1,47 @@
define(function() {
    "use strict";

    // 2011/01/01 in UTC
    var epoch = 1293840000;

    var decode = function(str) {
        return atob(str + new Array((((- str.length) % 4 + 4) % 4) + 1).join("="));
    };

    var timestamp = function(str) {
        var bytes = [];
        for (var i = 0; i < str.length; i++) {
            bytes.push(str.charCodeAt(i));
        }

        var a = 0;
        for (var j = 0; j < bytes.length; j++) {
            a = a << 8 | +bytes[j];
        }

        return a + epoch;
    };

    /*
     * Load data signed with itsdangerous' URLSafeTimedSerializer.
     *
     * If no signature was found or the payload has been expired, return
     * `null`. Otherwise, return unserialized datastructure.
     */
    return function(val, max_age) {

        var _ = val.split(".", 3),
            payload = _[0], ts = _[1], signature = _[2];

        if (typeof signature === "undefined") {
            return null;
        }

        var age = (new Date()).getTime() / 1000 - timestamp(decode(ts));
        if (typeof max_age !== "undefined" && age > max_age) {
            return null;
        }

        return JSON.parse(decode(payload));
    };
});
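
Note: the helper above only splits and base64-decodes tokens of the form `payload.timestamp.signature`; it does not (and cannot) check the signature, which the server already verified when it issued the cookie. The producer side with itsdangerous looks roughly like this (secret key and payload are examples); the library's timestamp epoch is the same 2011-01-01 constant the JS adds back.

from itsdangerous import URLSafeTimedSerializer

serializer = URLSafeTimedSerializer("not-the-real-session-key")

# yields "payload.timestamp.signature", all three parts URL-safe base64,
# which is exactly the layout the client-side decoder splits on "."
token = serializer.dumps({"username": "jane"})

# server-side round trip, enforcing a max age just like the JS helper
data = serializer.loads(token, max_age=15 * 60)
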

@@ -1,14 +1,25 @@
-div(class='isso-postbox')
+mixin form(style)
     div(class='form-wrapper')
         div(class='textarea-wrapper')
             div(class='textarea placeholder' contenteditable='true')
                 = i18n('postbox-text')
         section(class='auth-section')
-            p(class='input-wrapper')
+            p(class='input-wrapper' style=style)
                 input(type='text' name='author' placeholder=i18n('postbox-author'))
-            p(class='input-wrapper')
+            p(class='input-wrapper' style=style)
                 input(type='email' name='email' placeholder=i18n('postbox-email'))
-            p(class='input-wrapper')
+            p(class='input-wrapper' style=style)
                 input(type='text' name='website' placeholder=i18n('postbox-website'))
             p(class='post-action')
                 input(type='submit' value=i18n('postbox-submit'))
+
+div(class='isso-postbox')
+    if conf.auth
+        if cookie.valid
+            p=i18n('postbox-auth-commenting-as') + ' ' + cookie.data.username
+            +form('visibility: hidden;')
+        else
+            p=i18n('postbox-auth-required')
+            a(href='#{conf["auth-sign-in-url"]}') #{i18n('postbox-auth-sign-in')}
+    else
+        +form('')

isso/models/__init__.py (new file, 45 lines)
@@ -0,0 +1,45 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import operator


class Thread(object):

    __slots__ = ['id', 'uri', 'title']

    def __init__(self, id, uri, title=None):
        self.id = id
        self.uri = uri
        self.title = title


class Comment(object):

    __slots__ = [
        'id', 'parent', 'thread', 'created', 'modified', 'mode', 'remote_addr',
        'text', 'author', 'email', 'website', 'likes', 'dislikes', 'voters']

    def __init__(self, **kwargs):
        for attr in Comment.__slots__:
            try:
                setattr(self, attr, kwargs.pop(attr))
            except KeyError:
                raise TypeError("missing '{0}' argument".format(attr))

        if kwargs:
            raise TypeError("unexpected argument '{0}'".format(*kwargs.popitem()))

    def new(self, **kwargs):
        kw = dict(zip(Comment.__slots__, operator.attrgetter(*Comment.__slots__)(self)))
        kw.update(kwargs)
        return Comment(**kw)

    @property
    def moderated(self):
        return self.mode | 2 == self.mode

    @property
    def deleted(self):
        return self.mode | 4 == self.mode
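
Note: `Comment` is a plain value object: every slot must be supplied exactly once, and `new()` derives a modified copy instead of mutating in place. A small usage sketch (the field values are made up):

from isso.models import Comment, Thread

fields = dict(id=1, parent=None, thread=Thread(1, "/"), created=1414329600.0,
              modified=None, mode=1, remote_addr="127.0.0.1", text="Hello!",
              author=None, email=None, website=None, likes=0, dislikes=0,
              voters=bytearray(256))

comment = Comment(**fields)
assert not comment.moderated and not comment.deleted

# derive an updated copy; the original stays untouched
edited = comment.new(text="Hello, World!", modified=1414329660.0)
assert comment.text == "Hello!" and edited.text == "Hello, World!"
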

isso/queue/__init__.py (new file, 240 lines)
@@ -0,0 +1,240 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals, division

import abc
import json
import logging
import threading

import math
import bisect
import functools

import time
import datetime

try:
    import queue
except ImportError:
    import Queue as queue

from isso.compat import iteritems

logger = logging.getLogger("isso")

Full = queue.Full
Empty = queue.Empty


class Retry(Exception):
    pass


class Timeout(Exception):
    pass


@functools.total_ordering
class Message(object):
    """Queue payload sortable by time.

    :param type: task type
    :param data: task payload
    :param delay: initial delay before the job gets executed
    :param wait: subsequent delays for retrying
    """

    def __init__(self, type, data, delay=0, wait=0):
        self.type = type
        self.data = data

        self.wait = wait
        self.timestamp = time.time() + delay

    def __le__(self, other):
        return self.timestamp + self.wait <= other.timestamp + other.wait

    def __eq__(self, other):
        return self.type == other.type and self.data == other.data

    def __repr__(self):
        return "<Message {0}: {1}>".format(self.type, self.data)


class Queue(object):
    """An in-memory queue with requeuing abilities.

    :param maxlen: upperbound limit
    :param timeout: maximum retry interval after which a :func:`retry` call
                    raises :exception:`Timeout` (defaults to ~34 min)
    """

    def __init__(self, maxlen=-1, timeout=2**10):
        self.queue = []
        self.maxlen = maxlen
        self.timeout = timeout

        # lock destructive queue operations
        self.mutex = threading.Lock()

    def put(self, item):
        with self.mutex:
            if -1 < self.maxlen < len(self.queue) + 1:
                raise queue.Full

            bisect.insort(self.queue, item)

    def get(self):
        with self.mutex:
            try:
                msg = self.queue.pop(0)
            except IndexError:
                raise queue.Empty

            if msg.timestamp + msg.wait <= time.time():
                return msg

            self.queue.insert(0, msg)

        raise queue.Empty

    def retry(self, msg):
        self.put(Queue.delay(msg, self.timeout))

    def requeue(self, msg, timedelta):
        self.put(Message(msg.type, msg.data, timedelta.total_seconds()))

    @property
    def size(self):
        with self.mutex:
            return len(self.queue)

    @classmethod
    def delay(cls, msg, timeout, delayfunc=lambda i: max(1, i * 2)):
        wait = delayfunc(msg.wait)
        if wait >= timeout:
            raise Timeout("Exceeded time limit of {0}".format(timeout))
        return Message(msg.type, msg.data, 0, wait)


class Worker(threading.Thread):
    """Thread that pulls data from the queue, does the actual work. If the queue
    is empty, sleep for longer intervals (see :func:`wait` for details)

    On startup, all recurring tasks are automatically queued with zero delay
    to run at least once.

    A task may throw :exception Retry: to indicate a expected failure (e.g.
    network not reachable) and asking to retry later.

    :param queue: a Queue
    :param targets: a mapping of task names and the actual task objects"""

    interval = 0.05

    def __init__(self, queue, targets):
        super(Worker, self).__init__()

        self.alive = True
        self.queue = queue
        self.targets = targets

        for name, target in iteritems(targets):
            if isinstance(target, Cron):
                queue.put(Message(name, None))

    def run(self):
        while self.alive:
            try:
                payload = self.queue.get()
            except queue.Empty:
                Worker.wait(0.5)
            else:
                task = self.targets.get(payload.type)
                if task is None:
                    logger.warn("No such task '%s'", payload.type)
                    continue
                try:
                    logger.debug("Executing {0} with '{1}'".format(
                        payload.type, json.dumps(payload.data)))
                    task.run(payload.data)
                except Retry:
                    try:
                        self.queue.retry(payload)
                    except Timeout:
                        logger.exception("Uncaught exception while retrying "
                                         "%s.run", task)
                except Exception:
                    logger.exception("Uncaught exception while executing "
                                     "%s.run", task)
                finally:
                    if isinstance(task, Cron):
                        self.queue.requeue(payload, task.timedelta)

    def join(self, timeout=None):
        self.alive = False
        super(Worker, self).join(timeout)

    @classmethod
    def wait(cls, seconds):
        """Sleep for :param seconds: but split into :var interval: sleeps to
        be interruptable.
        """
        f, i = math.modf(seconds / Worker.interval)

        for x in range(int(i)):
            time.sleep(Worker.interval)

        time.sleep(f * Worker.interval)


class Task(object):

    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def run(self, data):
        return


class Cron(Task):

    __metaclass__ = abc.ABCMeta

    def __init__(self, *args, **kwargs):
        self.timedelta = datetime.timedelta(*args, **kwargs)

    def run(self, data):
        return


from .tasks import db


class Jobs(dict):
    """Obviously a poor man's factory"""

    available = {
        "db-purge": db.Purge
    }

    def __init__(self):
        super(Jobs, self).__init__()

    def register(self, name, *args, **kwargs):
        if name in self:
            return

        try:
            cls = Jobs.available[name]
        except KeyError:
            raise RuntimeError("No such task '%s'" % name)

        self[name] = cls(*args, **kwargs)


from .sqlite import SQLite3Queue

__all__ = ["Full", "Empty", "Retry", "Timeout", "Message", "Queue", "Targets",
           "Task", "Cron", "SQLite3Queue"]
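
Note: a short sketch of how the pieces above fit together: a task is registered under a name, messages carry that name plus a JSON-serializable payload, and a `Worker` thread drains the queue in the background. The `Echo` task and the `"echo"` name are invented for illustration; everything else is the API shown above.

import time

from isso.queue import Queue, Message, Worker, Task

class Echo(Task):
    """Toy task: print whatever payload it is given."""

    def run(self, data):
        print("echo:", data)

q = Queue(maxlen=32)
q.put(Message("echo", {"text": "hello"}))
q.put(Message("echo", {"text": "later"}, delay=1))   # becomes due ~1s from now

worker = Worker(q, {"echo": Echo()})
worker.start()
time.sleep(2)
worker.join()   # flips worker.alive to False, then waits for the thread
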

isso/queue/sqlite.py (new file, 58 lines)
@@ -0,0 +1,58 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import json
import time

from . import Queue, Full, Empty, Message


pickle = lambda val: json.dumps(val).encode("utf-8")
unpickle = lambda val: json.loads(val.decode("utf-8"))


class SQLite3Queue(Queue):
    """Implements a shared queue using SQLite3.

    :param connection: SQLite3 connection
    """

    def __init__(self, connection, maxlen=-1, timeout=2**10):
        super(SQLite3Queue, self).__init__(maxlen, timeout)
        self.connection = connection
        self.connection.execute(
            'CREATE TABLE IF NOT EXISTS queue ('
            ' id INTEGER PRIMARY KEY AUTOINCREMENT,'
            ' type TEXT,'
            ' data BLOB,'
            ' timestamp FLOAT,'
            ' wait FLOAT)')

    def put(self, item):
        with self.connection.transaction as con:
            count = con.execute('SELECT COUNT(*) FROM queue').fetchone()[0] + 1
            if -1 < self.maxlen < count:
                raise Full

            con.execute(
                'INSERT INTO queue (type, data, timestamp, wait) VALUES (?, ?, ?, ?)',
                (item.type, pickle(item.data), item.timestamp, item.wait))

    def get(self):
        with self.connection.transaction as con:
            row = con.execute(
                'SELECT id, type, data FROM queue '
                'WHERE (? > timestamp + wait) ORDER BY timestamp LIMIT 1',
                (time.time(), )).fetchone()
            if not row:
                raise Empty

            id, type, data = row
            con.execute('DELETE FROM queue WHERE id = ?', (str(id), ))
            return Message(type, unpickle(data))

    @property
    def size(self):
        with self.connection.transaction as con:
            return con.execute('SELECT COUNT(*) FROM queue').fetchone()[0]
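
Note: `SQLite3Queue` expects its `connection` argument to expose both `execute()` and a `transaction` context manager yielding a cursor-like object. A minimal stand-in built on the standard `sqlite3` module could look like the wrapper below; this is an assumption about that interface for illustration, not isso's actual database class.

import sqlite3
from contextlib import contextmanager

class Connection(object):
    """Tiny wrapper exposing execute() plus a .transaction context manager."""

    def __init__(self, path):
        self.conn = sqlite3.connect(path, check_same_thread=False)

    def execute(self, sql, args=()):
        return self.conn.execute(sql, args)

    @property
    @contextmanager
    def transaction(self):
        with self.conn:            # commits on success, rolls back on error
            yield self.conn

# hypothetical usage:
# queue = SQLite3Queue(Connection("/tmp/isso-queue.db"))
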

isso/queue/tasks/__init__.py (new file, 58 lines)
@@ -0,0 +1,58 @@
# -*- encoding: utf-8 -*-

import abc
import datetime


class Task(object):
    """A task. Override :func:`run` with custom functionality. Tasks itself
    may cause blocking I/O but should terminate.
    """

    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def run(self, data):
        return


class Cron(Task):
    """Crons are tasks which are re-scheduled after each execution."""

    __metaclass__ = abc.ABCMeta

    def __init__(self, *args, **kwargs):
        self.timedelta = datetime.timedelta(*args, **kwargs)

    def run(self, data):
        return


from . import db, http, mail


class Jobs(dict):
    """Obviously a poor man's factory"""

    available = {
        "db-purge": db.Purge,
        "http-fetch": http.Fetch,
        "mail-send": mail.Send
    }

    def __init__(self):
        super(Jobs, self).__init__()

    def register(self, name, *args, **kwargs):
        if name in self:
            return

        try:
            cls = Jobs.available[name]
        except KeyError:
            raise RuntimeError("No such task '%s'" % name)

        self[name] = cls(*args, **kwargs)


__all__ = ["Job", "Cron", "Jobs"]

isso/queue/tasks/db.py (new file, 16 lines)
@@ -0,0 +1,16 @@
# -*- encoding: utf-8 -*-

from isso.controllers import comments

from . import Cron


class Purge(Cron):

    def __init__(self, db, after):
        super(Purge, self).__init__(hours=1)
        self.comments = comments.Controller(db)
        self.after = after

    def run(self, data):
        self.comments.purge(self.after)

isso/queue/tasks/http.py (new file, 12 lines)
@@ -0,0 +1,12 @@
# -*- encoding: utf-8 -*-

from . import Task


class Fetch(Task):

    def __init__(self, db):
        self.db = db

    def run(self, data):
        raise NotImplemented

isso/queue/tasks/mail.py (new file, 59 lines)
@@ -0,0 +1,59 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import sys
import smtplib

from email.utils import formatdate
from email.header import Header
from email.mime.text import MIMEText

from . import Task


class Send(Task):

    def __init__(self, section):
        self.conf = section
        self.client = None

    def __enter__(self):
        klass = (smtplib.SMTP_SSL if self.conf.get('security') == 'ssl' else smtplib.SMTP)
        self.client = klass(host=self.conf.get('host'),
                            port=self.conf.getint('port'),
                            timeout=self.conf.getint('timeout'))

        if self.conf.get('security') == 'starttls':
            if sys.version_info >= (3, 4):
                import ssl
                self.client.starttls(context=ssl.create_default_context())
            else:
                self.client.starttls()

        if self.conf.get('username') and self.conf.get('password'):
            self.client.login(self.conf.get('username'),
                              self.conf.get('password'))

        return self.client

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.client is not None:
            self.client.quit()
            self.client = None

    def run(self, data):
        subject = data["subject"]
        body = data["body"]

        from_addr = self.conf.get("from")
        to_addr = self.conf.get("to")

        msg = MIMEText(body, 'plain', 'utf-8')
        msg['From'] = from_addr
        msg['To'] = to_addr
        msg['Date'] = formatdate(localtime=True)
        msg['Subject'] = Header(subject, 'utf-8')

        with self as con:
            con.sendmail(from_addr, to_addr, msg.as_string())
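
Note: `Send.run` expects a plain dict with `subject` and `body`, so notifying about an event is just a matter of enqueuing a `mail-send` message; the SMTP connection is opened lazily per delivery through the context-manager protocol above. A sketch (queue construction and the message fields are illustrative):

from isso.queue import Queue, Message

queue = Queue()
queue.put(Message("mail-send", {
    "subject": "New comment on /hello-world/",
    "body": "Jane wrote:\n\n> Nice article!",
}))
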

@@ -9,6 +9,5 @@ from isso import dist, config

 application = make_app(
     config.load(
-        os.path.join(dist.location, "share", "isso.conf"),
-        os.environ.get('ISSO_SETTINGS')),
-    multiprocessing=True)
+        os.path.join(dist.location, "isso", "defaults.ini"),
+        os.environ.get('ISSO_SETTINGS')))

isso/spam/__init__.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import time

from sqlalchemy.sql import select, func


class Guard(object):

    def __init__(self, db, enabled, ratelimit=2, direct_reply=3,
                 reply_to_self=False, max_age=15*60):
        self.db = db
        self.enabled = enabled

        self.ratelimit = ratelimit
        self.direct_reply = direct_reply
        self.reply_to_self = reply_to_self
        self.max_age = max_age

    def validate(self, remote_addr, thread, comment):

        now = time.time()

        # block more than :param:`ratelimit` comments per minute
        count = self.db.engine.execute(
            select([func.count(self.db.comments)])
            .where(self.db.comments.c.remote_addr == remote_addr)
            .where(now - self.db.comments.c.created < 60)).fetchone()[0]

        if count >= self.ratelimit > -1:
            return False, "%s: ratelimit exceeded" % remote_addr

        # block more than three comments as direct response to the post
        if comment["parent"] is None:
            count = self.db.engine.execute(
                select([func.count(self.db.comments)])
                .where(self.db.comments.c.thread == thread.id)
                .where(self.db.comments.c.remote_addr == remote_addr)
                .where(self.db.comments.c.parent == None)).fetchone()[0]

            if count >= self.direct_reply:
                return False, "only {0} direct response(s) to {1}".format(
                    count, thread.uri)

        elif not self.reply_to_self:
            count = self.db.engine.execute(
                select([func.count(self.db.comments)])
                .where(self.db.comments.c.remote_addr == remote_addr)
                .where(now - self.db.comments.c.created < self.max_age)).fetchone()[0]

            if count > 0:
                return False, "editing frame is still open"

        return True, ""
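
Note: a sketch of how this Guard could sit in front of comment creation: validate first, hand the payload to the comments controller only if no rule vetoes it. The function is parameterized so it stays self-contained; `guard` is a Guard as above and `controller` is whatever exposes the `new(remote_addr, thread, data)` call used in the tests further down.

def create_comment(guard, controller, remote_addr, thread, data):
    """Reject the request early if the Guard vetoes it, otherwise store it."""
    valid, reason = guard.validate(remote_addr, thread, data)
    if not valid:
        # the view layer would translate this into a 403 response
        raise ValueError(reason)
    return controller.new(remote_addr, thread, data)
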
236
isso/tests/controllers/test_comments.py
Normal file
236
isso/tests/controllers/test_comments.py
Normal file
@ -0,0 +1,236 @@
|
|||||||
|
# -*- encoding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import time
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from isso import db
|
||||||
|
|
||||||
|
from isso.models import Thread
|
||||||
|
from isso.controllers import comments
|
||||||
|
|
||||||
|
IP = "127.0.0.1"
|
||||||
|
TH = Thread(1, "/")
|
||||||
|
|
||||||
|
|
||||||
|
class TestValidator(unittest.TestCase):
|
||||||
|
|
||||||
|
def test_validator(self):
|
||||||
|
|
||||||
|
default = dict(zip(["parent", "text", "author", "email", "website"], [None]*5))
|
||||||
|
new = lambda **z: dict(default, **z)
|
||||||
|
verify = lambda data: comments.Validator.verify(data)[0]
|
||||||
|
|
||||||
|
# invalid types
|
||||||
|
self.assertFalse(verify(new(text=None)))
|
||||||
|
self.assertFalse(verify(new(parent="xxx")))
|
||||||
|
for key in ("author", "website", "email"):
|
||||||
|
self.assertFalse(verify(new(text="...", **{key: 3.14})))
|
||||||
|
|
||||||
|
# text too short and/or blank
|
||||||
|
for text in ("", "\n\n\n"):
|
||||||
|
self.assertFalse(verify(new(text=text)))
|
||||||
|
|
||||||
|
# email/website length
|
||||||
|
self.assertTrue(verify(new(text="...", email="*"*254)))
|
||||||
|
self.assertTrue(verify(new(text="...", website="google.de/" + "a"*128)))
|
||||||
|
|
||||||
|
self.assertFalse(verify(new(text="...", email="*"*1024)))
|
||||||
|
self.assertFalse(verify(new(text="...", website="google.de/" + "*"*1024)))
|
||||||
|
|
||||||
|
# invalid url
|
||||||
|
self.assertFalse(verify(new(text="...", website="spam")))
|
||||||
|
|
||||||
|
def test_isurl(self):
|
||||||
|
isurl = comments.Validator.isurl
|
||||||
|
|
||||||
|
# valid website url
|
||||||
|
self.assertTrue(isurl("example.tld"))
|
||||||
|
self.assertTrue(isurl("http://example.tld"))
|
||||||
|
self.assertTrue(isurl("https://example.tld"))
|
||||||
|
self.assertTrue(isurl("https://example.tld:1337/"))
|
||||||
|
self.assertTrue(isurl("https://example.tld:1337/foobar"))
|
||||||
|
self.assertTrue(isurl("https://example.tld:1337/foobar?p=1#isso-thread"))
|
||||||
|
|
||||||
|
self.assertFalse(isurl("ftp://example.tld/"))
|
||||||
|
self.assertFalse(isurl("tel:+1234567890"))
|
||||||
|
self.assertFalse(isurl("+1234567890"))
|
||||||
|
self.assertFalse(isurl("spam"))
|
||||||
|
|
||||||
|
|
||||||
|
class TestController(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.controller = comments.Controller(db.Adapter("sqlite:///:memory:"))
|
||||||
|
|
||||||
|
def test_new(self):
|
||||||
|
obj = self.controller.new(IP, TH, dict(text="Здравствуй, мир!"))
|
||||||
|
|
||||||
|
self.assertEqual(obj.id, 1)
|
||||||
|
self.assertEqual(obj.text, "Здравствуй, мир!")
|
||||||
|
self.assertLess(obj.created, time.time())
|
||||||
|
self.assertIn(IP, obj.voters)
|
||||||
|
self.assertFalse(obj.moderated)
|
||||||
|
self.assertFalse(obj.deleted)
|
||||||
|
|
||||||
|
sec = self.controller.new(IP, TH, dict(text="Ohai"), moderated=True)
|
||||||
|
self.assertEqual(sec.id, 2)
|
||||||
|
self.assertEqual(sec.text, "Ohai")
|
||||||
|
self.assertTrue(sec.moderated)
|
||||||
|
self.assertFalse(sec.deleted)
|
||||||
|
|
||||||
|
self.assertRaises(comments.Invalid, self.controller.new, IP, TH, dict())
|
||||||
|
|
||||||
|
def test_create_invalid_parent(self):
|
||||||
|
a = self.controller.new(IP, TH, dict(text="..."))
|
||||||
|
b = self.controller.new(IP, TH, dict(text="...", parent=a.id))
|
||||||
|
c = self.controller.new(IP, TH, dict(text="...", parent=b.id))
|
||||||
|
|
||||||
|
# automatic insertion to a maximum nesting level of 1
|
||||||
|
self.assertEqual(c.parent, b.parent)
|
||||||
|
|
||||||
|
# remove invalid reference
|
||||||
|
d = self.controller.new(IP, TH, dict(text="...", parent=42))
|
||||||
|
self.assertIsNone(d.parent)
|
||||||
|
|
||||||
|
def test_edit(self):
|
||||||
|
a = self.controller.new(IP, TH, dict(text="Hello!"))
|
||||||
|
z = self.controller.new(IP, TH, dict(text="Dummy"))
|
||||||
|
|
||||||
|
a = self.controller.edit(a.id, dict(
|
||||||
|
text="Hello, World!", author="Hans", email="123", website="http://example.tld/"))
|
||||||
|
b = self.controller.get(a.id)
|
||||||
|
|
||||||
|
self.assertEqual(a.text, "Hello, World!")
|
||||||
|
self.assertEqual(a.author, "Hans")
|
||||||
|
self.assertEqual(a.email, "123")
|
||||||
|
self.assertEqual(a.website, "http://example.tld/")
|
||||||
|
|
||||||
|
for attr in ("text", "author", "email", "website"):
|
||||||
|
self.assertEqual(getattr(a, attr), getattr(b, attr))
|
||||||
|
|
||||||
|
self.assertEqual(self.controller.get(z.id).text, z.text)
|
||||||
|
|
||||||
|
# edit invalid data
|
||||||
|
self.assertRaises(comments.Invalid, self.controller.edit, a.id, dict(text=""))
|
||||||
|
|
||||||
|
# edit invalid comment
|
||||||
|
self.assertIsNone(self.controller.edit(23, dict(text="...")))
|
||||||
|
|
||||||
|
def test_get(self):
|
||||||
|
obj = self.controller.get(23)
|
||||||
|
self.assertIsNone(obj)
|
||||||
|
|
||||||
|
self.controller.new(IP, TH, dict(text="..."))
|
||||||
|
obj = self.controller.get(1)
|
||||||
|
|
||||||
|
self.assertEqual(obj.id, 1)
|
||||||
|
self.assertEqual(obj.thread, 1)
|
||||||
|
|
||||||
|
def test_all(self):
|
||||||
|
foo, bar = Thread(0, "/foo"), Thread(1, "/bar")
|
||||||
|
args = ("One", "Two", "three")
|
||||||
|
|
||||||
|
for text in args:
|
||||||
|
self.controller.new(IP, foo, dict(text=text))
|
||||||
|
|
||||||
|
# control group
|
||||||
|
self.controller.new(IP, bar, dict(text="..."))
|
||||||
|
|
||||||
|
rv = self.controller.all(foo)
|
||||||
|
self.assertEqual(len(rv), 3)
|
||||||
|
|
||||||
|
for text, obj in zip(args, rv):
|
||||||
|
self.assertEqual(text, obj.text)
|
||||||
|
|
||||||
|
def test_delete(self):
|
||||||
|
for _ in range(3):
|
||||||
|
self.controller.new(IP, TH, dict(text="..."))
|
||||||
|
|
||||||
|
for n in range(3):
|
||||||
|
self.controller.delete(n + 1)
|
||||||
|
self.assertIsNone(self.controller.get(n+1))
|
||||||
|
|
||||||
|
# delete invalid comment
|
||||||
|
self.assertIsNone(self.controller.delete(23))
|
||||||
|
|
||||||
|
def test_delete_nested(self):
|
||||||
|
p = self.controller.new(IP, TH, dict(text="parent"))
|
||||||
|
c1 = self.controller.new(IP, TH, dict(text="child", parent=p.id))
|
||||||
|
c2 = self.controller.new(IP, TH, dict(text="child", parent=p.id))
|
||||||
|
|
||||||
|
self.controller.delete(p.id)
|
||||||
|
p = self.controller.get(p.id)
|
||||||
|
|
||||||
|
self.assertIsNotNone(p)
|
||||||
|
self.assertTrue(p.deleted)
|
||||||
|
self.assertEqual(p.text, "")
|
||||||
|
|
||||||
|
self.controller.delete(c1.id)
|
||||||
|
|
||||||
|
self.assertIsNone(self.controller.get(c1.id))
|
||||||
|
self.assertIsNotNone(self.controller.get(c2.id))
|
||||||
|
self.assertIsNotNone(self.controller.get(p.id))
|
||||||
|
|
||||||
|
self.controller.delete(c2.id)
|
||||||
|
self.assertIsNone(self.controller.get(p.id))
|
||||||
|
|
||||||
|
def test_count(self):
|
||||||
|
threads = [Thread(0, "a"), None, Thread(1, "c")]
|
||||||
|
counter = [1, 0, 2]
|
||||||
|
|
||||||
|
self.assertEqual(self.controller.count(*threads), [0, 0, 0])
|
||||||
|
|
||||||
|
for thread, count in zip(threads, counter):
|
||||||
|
if thread is None:
|
||||||
|
continue
|
||||||
|
for _ in range(count):
|
||||||
|
self.controller.new(IP, thread, dict(text="..."))
|
||||||
|
|
||||||
|
self.assertEqual(self.controller.count(*threads), counter)
|
||||||
|
|
||||||
|
def test_votes(self):
|
||||||
|
author = "127.0.0.1"
|
||||||
|
foo, bar = "1.2.3.4", "1.3.3.7"
|
||||||
|
|
||||||
|
c = self.controller.new(author, TH, dict(text="..."))
|
||||||
|
self.assertEqual(c.likes, 0)
|
||||||
|
self.assertEqual(c.dislikes, 0)
|
||||||
|
|
||||||
|
# author can not vote on own comment
|
||||||
|
self.assertFalse(self.controller.like(author, c.id))
|
||||||
|
|
||||||
|
# but others can (at least once)
|
||||||
|
self.assertTrue(self.controller.like(foo, c.id))
|
||||||
|
self.assertTrue(self.controller.dislike(bar, c.id))
|
||||||
|
|
||||||
|
self.assertFalse(self.controller.like(foo, c.id))
|
||||||
|
self.assertFalse(self.controller.like(bar, c.id))
|
||||||
|
|
||||||
|
c = self.controller.get(c.id)
|
||||||
|
self.assertEqual(c.likes, 1)
|
||||||
|
self.assertEqual(c.dislikes, 1)
|
||||||
|
|
||||||
|
# vote a non-existent comment
|
||||||
|
self.assertFalse(self.controller.like(foo, 23))
|
||||||
|
|
||||||
|
def test_activation(self):
|
||||||
|
c = self.controller.new(IP, TH, dict(text="..."), moderated=True)
|
||||||
|
self.assertEqual(len(self.controller.all(TH)), 0)
|
||||||
|
|
||||||
|
self.assertTrue(self.controller.activate(c.id))
|
||||||
|
self.assertFalse(self.controller.activate(c.id))
|
||||||
|
self.assertEqual(len(self.controller.all(TH)), 1)
|
||||||
|
|
||||||
|
# invalid comment returns False
|
||||||
|
self.assertFalse(self.controller.activate(23))
|
||||||
|
|
||||||
|
def test_prune(self):
|
||||||
|
|
||||||
|
c = self.controller.new(IP, TH, dict(text="..."), moderated=True)
|
||||||
|
self.controller.prune(42)
|
||||||
|
self.assertIsNotNone(self.controller.get(c.id))
|
||||||
|
|
||||||
|
self.controller.prune(0)
|
||||||
|
self.assertIsNone(self.controller.get(c.id))
|

isso/tests/controllers/test_threads.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import unittest

from isso import db
from isso.controllers import comments, threads


class TestController(unittest.TestCase):

    def setUp(self):
        _db = db.Adapter("sqlite:///:memory:")
        self.comments = comments.Controller(_db)
        self.threads = threads.Controller(_db)

    def test_new(self):
        thread = self.threads.new("/", None)

        self.assertEqual(thread.id, 1)
        self.assertEqual(thread.uri, "/")
        self.assertEqual(thread.title, None)

    def test_get(self):
        self.assertIsNone(self.threads.get("/"))

        th = self.threads.get(self.threads.new("/", None).uri)
        self.assertIsNotNone(th)

        self.assertEqual(th.id, 1)
        self.assertEqual(th.uri, "/")
        self.assertEqual(th.title, None)

    def test_delete(self):
        th = self.threads.new("/", None)
        self.threads.delete(th.uri)

        self.assertIsNone(self.threads.get(th.uri))

    def test_delete_removes_comments(self):
        th = self.threads.new("/", None)
        cg = self.threads.new("/control/group", None)

        for _ in range(3):
            self.comments.new("127.0.0.1", th, dict(text="..."))
            self.comments.new("127.0.0.1", cg, dict(text="..."))

        self.assertEqual(self.comments.count(th), [3])
        self.assertEqual(self.comments.count(cg), [3])

        # now remove the thread
        self.threads.delete(th.uri)

        self.assertEqual(self.comments.count(th), [0])
        self.assertEqual(self.comments.count(cg), [3])

@@ -1,41 +0,0 @@
# -*- encoding: utf-8 -*-

import json

from werkzeug.test import Client


class FakeIP(object):

    def __init__(self, app, ip):
        self.app = app
        self.ip = ip

    def __call__(self, environ, start_response):
        environ['REMOTE_ADDR'] = self.ip
        return self.app(environ, start_response)


class JSONClient(Client):

    def open(self, *args, **kwargs):
        kwargs.setdefault('content_type', 'application/json')
        return super(JSONClient, self).open(*args, **kwargs)


class Dummy:

    status = 200

    def __enter__(self):
        return self

    def read(self):
        return ''

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass


curl = lambda method, host, path: Dummy()
loads = lambda data: json.loads(data.decode('utf-8'))

isso/tests/test_cache.py (new file, 59 lines)
@@ -0,0 +1,59 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import unittest

from isso.compat import text_type as str

from isso.db import SQLite3
from isso.cache import Cache, SQLite3Cache

ns = "test"


class TestCache(unittest.TestCase):

    def setUp(self):
        self.cache = Cache(threshold=8)

    def test_cache(self):
        cache = self.cache

        cache.delete(ns, "foo")
        self.assertIsNone(cache.get(ns, "foo"))

        cache.set(ns, "foo", "bar")
        self.assertEqual(cache.get(ns, "foo"), "bar")

        cache.delete(ns, "foo")
        self.assertIsNone(cache.get(ns, "foo"))

    def test_full(self):
        cache = self.cache

        cache.set(ns, "foo", "bar")

        for i in range(7):
            cache.set(ns, str(i), "Spam!")

        for i in range(7):
            self.assertEqual(cache.get(ns, str(i)), "Spam!")

        self.assertIsNotNone(cache.get(ns, "foo"))

        cache.set(ns, "bar", "baz")
        self.assertIsNone(cache.get(ns, "foo"))

    def test_primitives(self):
        cache = self.cache

        for val in (None, True, [1, 2, 3], {"bar": "baz"}):
            cache.set(ns, "val", val)
            self.assertEqual(cache.get(ns, "val"), val, val.__class__.__name__)


class TestSQLite3Cache(TestCache):

    def setUp(self):
        self.cache = SQLite3Cache(SQLite3(":memory:"), threshold=8)
@ -1,441 +0,0 @@
|
|||||||
# -*- encoding: utf-8 -*-
|
|
||||||
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import os
|
|
||||||
import json
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
try:
|
|
||||||
import unittest2 as unittest
|
|
||||||
except ImportError:
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
try:
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
except ImportError:
|
|
||||||
from urllib import urlencode
|
|
||||||
|
|
||||||
from werkzeug.wrappers import Response
|
|
||||||
|
|
||||||
from isso import Isso, core, config, dist
|
|
||||||
from isso.utils import http
|
|
||||||
from isso.views import comments
|
|
||||||
|
|
||||||
from isso.compat import iteritems
|
|
||||||
|
|
||||||
from fixtures import curl, loads, FakeIP, JSONClient
|
|
||||||
http.curl = curl
|
|
||||||
|
|
||||||
|
|
||||||
class TestComments(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
fd, self.path = tempfile.mkstemp()
|
|
||||||
conf = config.load(os.path.join(dist.location, "share", "isso.conf"))
|
|
||||||
conf.set("general", "dbpath", self.path)
|
|
||||||
conf.set("guard", "enabled", "off")
|
|
||||||
conf.set("hash", "algorithm", "none")
|
|
||||||
|
|
||||||
class App(Isso, core.Mixin):
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.app = App(conf)
|
|
||||||
self.app.wsgi_app = FakeIP(self.app.wsgi_app, "192.168.1.1")
|
|
||||||
|
|
||||||
self.client = JSONClient(self.app, Response)
|
|
||||||
self.get = self.client.get
|
|
||||||
self.put = self.client.put
|
|
||||||
self.post = self.client.post
|
|
||||||
self.delete = self.client.delete
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
os.unlink(self.path)
|
|
||||||
|
|
||||||
def testGet(self):
|
|
||||||
|
|
||||||
self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Lorem ipsum ...'}))
|
|
||||||
r = self.get('/id/1')
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
rv = loads(r.data)
|
|
||||||
|
|
||||||
self.assertEqual(rv['id'], 1)
|
|
||||||
self.assertEqual(rv['text'], '<p>Lorem ipsum ...</p>')
|
|
||||||
|
|
||||||
def testCreate(self):
|
|
||||||
|
|
||||||
rv = self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Lorem ipsum ...'}))
|
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 201)
|
|
||||||
self.assertIn("Set-Cookie", rv.headers)
|
|
||||||
|
|
||||||
rv = loads(rv.data)
|
|
||||||
|
|
||||||
self.assertEqual(rv["mode"], 1)
|
|
||||||
self.assertEqual(rv["text"], '<p>Lorem ipsum ...</p>')
|
|
||||||
|
|
||||||
def textCreateWithNonAsciiText(self):
|
|
||||||
|
|
||||||
rv = self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Здравствуй, мир!'}))
|
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 201)
|
|
||||||
rv = loads(rv.data)
|
|
||||||
|
|
||||||
self.assertEqual(rv["mode"], 1)
|
|
||||||
self.assertEqual(rv["text"], '<p>Здравствуй, мир!</p>')
|
|
||||||
|
|
||||||
def testCreateMultiple(self):
|
|
||||||
|
|
||||||
a = self.post('/new?uri=test', data=json.dumps({'text': '...'}))
|
|
||||||
b = self.post('/new?uri=test', data=json.dumps({'text': '...'}))
|
|
||||||
c = self.post('/new?uri=test', data=json.dumps({'text': '...'}))
|
|
||||||
|
|
||||||
self.assertEqual(loads(a.data)["id"], 1)
|
|
||||||
self.assertEqual(loads(b.data)["id"], 2)
|
|
||||||
self.assertEqual(loads(c.data)["id"], 3)
|
|
||||||
|
|
||||||
def testCreateAndGetMultiple(self):
|
|
||||||
|
|
||||||
for i in range(20):
|
|
||||||
self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Spam'}))
|
|
||||||
|
|
||||||
r = self.get('/?uri=%2Fpath%2F')
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
rv = loads(r.data)
|
|
||||||
self.assertEqual(len(rv['replies']), 20)
|
|
||||||
|
|
||||||
def testCreateInvalidParent(self):
|
|
||||||
|
|
||||||
self.post('/new?uri=test', data=json.dumps({'text': '...'}))
|
|
||||||
self.post('/new?uri=test', data=json.dumps({'text': '...', 'parent': 1}))
|
|
||||||
invalid = self.post('/new?uri=test', data=json.dumps({'text': '...', 'parent': 2}))
|
|
||||||
|
|
||||||
self.assertEqual(loads(invalid.data)["parent"], 1)
|
|
||||||
|
|
||||||
def testVerifyFields(self):
|
|
||||||
|
|
||||||
verify = lambda comment: comments.API.verify(comment)[0]
|
|
||||||
|
|
||||||
# text is missing
|
|
||||||
self.assertFalse(verify({}))
|
|
||||||
|
|
||||||
# invalid types
|
|
||||||
self.assertFalse(verify({"text": "...", "parent": "xxx"}))
|
|
||||||
for key in ("author", "website", "email"):
|
|
||||||
self.assertFalse(verify({"text": True, key: 3.14}))
|
|
||||||
|
|
||||||
# text too short and/or blank
|
|
||||||
for text in ("", "\n\n\n"):
|
|
||||||
self.assertFalse(verify({"text": text}))
|
|
||||||
|
|
||||||
# email/website length
|
|
||||||
self.assertTrue(verify({"text": "...", "email": "*"*254}))
|
|
||||||
self.assertTrue(verify({"text": "...", "website": "google.de/" + "a"*128}))
|
|
||||||
|
|
||||||
self.assertFalse(verify({"text": "...", "email": "*"*1024}))
|
|
||||||
self.assertFalse(verify({"text": "...", "website": "google.de/" + "*"*1024}))
|
|
||||||
|
|
||||||
# valid website url
|
|
||||||
self.assertTrue(comments.isurl("example.tld"))
|
|
||||||
self.assertTrue(comments.isurl("http://example.tld"))
|
|
||||||
self.assertTrue(comments.isurl("https://example.tld"))
|
|
||||||
self.assertTrue(comments.isurl("https://example.tld:1337/"))
|
|
||||||
self.assertTrue(comments.isurl("https://example.tld:1337/foobar"))
|
|
||||||
self.assertTrue(comments.isurl("https://example.tld:1337/foobar?p=1#isso-thread"))
|
|
||||||
|
|
||||||
self.assertFalse(comments.isurl("ftp://example.tld/"))
|
|
||||||
self.assertFalse(comments.isurl("tel:+1234567890"))
|
|
||||||
self.assertFalse(comments.isurl("+1234567890"))
|
|
||||||
self.assertFalse(comments.isurl("spam"))
|
|
||||||
|
|
||||||
def testGetInvalid(self):
|
|
||||||
|
|
||||||
self.assertEqual(self.get('/?uri=%2Fpath%2F&id=123').status_code, 404)
|
|
||||||
self.assertEqual(self.get('/?uri=%2Fpath%2Fspam%2F&id=123').status_code, 404)
|
|
||||||
self.assertEqual(self.get('/?uri=?uri=%foo%2F').status_code, 404)
|
|
||||||
|
|
||||||
def testGetLimited(self):
|
|
||||||
|
|
||||||
for i in range(20):
|
|
||||||
self.post('/new?uri=test', data=json.dumps({'text': '...'}))
|
|
||||||
|
|
||||||
r = self.get('/?uri=test&limit=10')
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
rv = loads(r.data)
|
|
||||||
self.assertEqual(len(rv['replies']), 10)
|
|
||||||
|
|
||||||
def testGetNested(self):
|
|
        self.post('/new?uri=test', data=json.dumps({'text': '...'}))
        self.post('/new?uri=test', data=json.dumps({'text': '...', 'parent': 1}))

        r = self.get('/?uri=test&parent=1')
        self.assertEqual(r.status_code, 200)

        rv = loads(r.data)
        self.assertEqual(len(rv['replies']), 1)

    def testGetLimitedNested(self):

        self.post('/new?uri=test', data=json.dumps({'text': '...'}))
        for i in range(20):
            self.post('/new?uri=test', data=json.dumps({'text': '...', 'parent': 1}))

        r = self.get('/?uri=test&parent=1&limit=10')
        self.assertEqual(r.status_code, 200)

        rv = loads(r.data)
        self.assertEqual(len(rv['replies']), 10)

    def testUpdate(self):

        self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Lorem ipsum ...'}))
        self.put('/id/1', data=json.dumps({
            'text': 'Hello World', 'author': 'me', 'website': 'http://example.com/'}))

        r = self.get('/id/1?plain=1')
        self.assertEqual(r.status_code, 200)

        rv = loads(r.data)
        self.assertEqual(rv['text'], 'Hello World')
        self.assertEqual(rv['author'], 'me')
        self.assertEqual(rv['website'], 'http://example.com/')
        self.assertIn('modified', rv)

    def testDelete(self):

        self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Lorem ipsum ...'}))
        r = self.delete('/id/1')
        self.assertEqual(r.status_code, 200)
        self.assertEqual(loads(r.data), None)
        self.assertEqual(self.get('/id/1').status_code, 404)

    def testDeleteWithReference(self):

        client = JSONClient(self.app, Response)
        client.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'First'}))
        client.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'First', 'parent': 1}))

        r = client.delete('/id/1')
        self.assertEqual(r.status_code, 200)
        self.assertEqual(loads(r.data)['mode'], 4)
        self.assertIn('/path/', self.app.db.threads)

        data = loads(client.get("/?uri=%2Fpath%2F").data)
        self.assertEqual(data["total_replies"], 1)

        self.assertEqual(self.get('/?uri=%2Fpath%2F&id=1').status_code, 200)
        self.assertEqual(self.get('/?uri=%2Fpath%2F&id=2').status_code, 200)

        r = client.delete('/id/2')
        self.assertEqual(self.get('/?uri=%2Fpath%2F').status_code, 404)
        self.assertNotIn('/path/', self.app.db.threads)

    def testDeleteWithMultipleReferences(self):
        """
        [ comment 1 ]
            |
             --- [ comment 2, ref 1 ]
            |
             --- [ comment 3, ref 1 ]
        [ comment 4 ]
        """
        client = JSONClient(self.app, Response)

        client.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'First'}))
        client.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Second', 'parent': 1}))
        client.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Third', 'parent': 1}))
        client.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Last'}))

        client.delete('/id/1')
        self.assertEqual(self.get('/?uri=%2Fpath%2F').status_code, 200)
        client.delete('/id/2')
        self.assertEqual(self.get('/?uri=%2Fpath%2F').status_code, 200)
        client.delete('/id/3')
        self.assertEqual(self.get('/?uri=%2Fpath%2F').status_code, 200)
        client.delete('/id/4')
        self.assertEqual(self.get('/?uri=%2Fpath%2F').status_code, 404)

    def testPathVariations(self):

        paths = ['/sub/path/', '/path.html', '/sub/path.html', 'path', '/']

        for path in paths:
            self.assertEqual(self.post('/new?' + urlencode({'uri': path}),
                             data=json.dumps({'text': '...'})).status_code, 201)

        for i, path in enumerate(paths):
            self.assertEqual(self.get('/?' + urlencode({'uri': path})).status_code, 200)
            self.assertEqual(self.get('/id/%i' % (i + 1)).status_code, 200)

    def testDeleteAndCreateByDifferentUsersButSamePostId(self):

        mallory = JSONClient(self.app, Response)
        mallory.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Foo'}))
        mallory.delete('/id/1')

        bob = JSONClient(self.app, Response)
        bob.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Bar'}))

        self.assertEqual(mallory.delete('/id/1').status_code, 403)
        self.assertEqual(bob.delete('/id/1').status_code, 200)

    def testHash(self):

        a = self.post('/new?uri=%2Fpath%2F', data=json.dumps({"text": "Aaa"}))
        b = self.post('/new?uri=%2Fpath%2F', data=json.dumps({"text": "Bbb"}))
        c = self.post('/new?uri=%2Fpath%2F', data=json.dumps({"text": "Ccc", "email": "..."}))

        a = loads(a.data)
        b = loads(b.data)
        c = loads(c.data)

        self.assertNotEqual(a['hash'], '192.168.1.1')
        self.assertEqual(a['hash'], b['hash'])
        self.assertNotEqual(a['hash'], c['hash'])

    def testVisibleFields(self):

        rv = self.post('/new?uri=%2Fpath%2F', data=json.dumps({"text": "...", "invalid": "field"}))
        self.assertEqual(rv.status_code, 201)

        rv = loads(rv.data)

        for key in comments.API.FIELDS:
            rv.pop(key)

        self.assertListEqual(list(rv.keys()), [])

    def testCounts(self):

        self.assertEqual(self.get('/count?uri=%2Fpath%2F').status_code, 404)
        self.post('/new?uri=%2Fpath%2F', data=json.dumps({"text": "..."}))

        rv = self.get('/count?uri=%2Fpath%2F')
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(loads(rv.data), 1)

        for x in range(3):
            self.post('/new?uri=%2Fpath%2F', data=json.dumps({"text": "..."}))

        rv = self.get('/count?uri=%2Fpath%2F')
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(loads(rv.data), 4)

        for x in range(4):
            self.delete('/id/%i' % (x + 1))

        rv = self.get('/count?uri=%2Fpath%2F')
        self.assertEqual(rv.status_code, 404)

    def testMultipleCounts(self):

        expected = {'a': 1, 'b': 2, 'c': 0}

        for uri, count in iteritems(expected):
            for _ in range(count):
                self.post('/new?uri=%s' % uri, data=json.dumps({"text": "..."}))

        rv = self.post('/count', data=json.dumps(list(expected.keys())))
        self.assertEqual(loads(rv.data), list(expected.values()))

    def testModify(self):
        self.post('/new?uri=test', data=json.dumps({"text": "Tpyo"}))

        self.put('/id/1', data=json.dumps({"text": "Tyop"}))
        self.assertEqual(loads(self.get('/id/1').data)["text"], "<p>Tyop</p>")

        self.put('/id/1', data=json.dumps({"text": "Typo"}))
        self.assertEqual(loads(self.get('/id/1').data)["text"], "<p>Typo</p>")

    def testDeleteCommentRemovesThread(self):

        self.client.post('/new?uri=%2F', data=json.dumps({"text": "..."}))
        self.assertIn('/', self.app.db.threads)
        self.client.delete('/id/1')
        self.assertNotIn('/', self.app.db.threads)

    def testCSRF(self):

        js = "application/json"
        form = "application/x-www-form-urlencoded"

        self.post('/new?uri=%2F', data=json.dumps({"text": "..."}))

        # no header is fine (default for XHR)
        self.assertEqual(self.post('/id/1/dislike', content_type="").status_code, 200)

        # x-www-form-urlencoded is definitely not RESTful
        self.assertEqual(self.post('/id/1/dislike', content_type=form).status_code, 403)
        self.assertEqual(self.post('/new?uri=%2F', data=json.dumps({"text": "..."}),
                                   content_type=form).status_code, 403)
        # just for the record
        self.assertEqual(self.post('/id/1/dislike', content_type=js).status_code, 200)


class TestModeratedComments(unittest.TestCase):

    def setUp(self):
        fd, self.path = tempfile.mkstemp()
        conf = config.load(os.path.join(dist.location, "share", "isso.conf"))
        conf.set("general", "dbpath", self.path)
        conf.set("moderation", "enabled", "true")
        conf.set("guard", "enabled", "off")
        conf.set("hash", "algorithm", "none")

        class App(Isso, core.Mixin):
            pass

        self.app = App(conf)
        self.app.wsgi_app = FakeIP(self.app.wsgi_app, "192.168.1.1")
        self.client = JSONClient(self.app, Response)

    def tearDown(self):
        os.unlink(self.path)

    def testAddComment(self):

        rv = self.client.post('/new?uri=test', data=json.dumps({"text": "..."}))
        self.assertEqual(rv.status_code, 202)

        self.assertEqual(self.client.get('/id/1').status_code, 200)
        self.assertEqual(self.client.get('/?uri=test').status_code, 404)

        self.app.db.comments.activate(1)
        self.assertEqual(self.client.get('/?uri=test').status_code, 200)


class TestPurgeComments(unittest.TestCase):

    def setUp(self):
        fd, self.path = tempfile.mkstemp()
        conf = config.load(os.path.join(dist.location, "share", "isso.conf"))
        conf.set("general", "dbpath", self.path)
        conf.set("moderation", "enabled", "true")
        conf.set("guard", "enabled", "off")
        conf.set("hash", "algorithm", "none")

        class App(Isso, core.Mixin):
            pass

        self.app = App(conf)
        self.app.wsgi_app = FakeIP(self.app.wsgi_app, "192.168.1.1")
        self.client = JSONClient(self.app, Response)

    def testPurgeDoesNoHarm(self):
        self.client.post('/new?uri=test', data=json.dumps({"text": "..."}))
        self.app.db.comments.activate(1)
        self.app.db.comments.purge(0)
        self.assertEqual(self.client.get('/?uri=test').status_code, 200)

    def testPurgeWorks(self):
        self.client.post('/new?uri=test', data=json.dumps({"text": "..."}))
        self.app.db.comments.purge(0)
        self.assertEqual(self.client.get('/id/1').status_code, 404)

        self.client.post('/new?uri=test', data=json.dumps({"text": "..."}))
        self.app.db.comments.purge(3600)
        self.assertEqual(self.client.get('/id/1').status_code, 200)
@@ -1,11 +1,7 @@
 # -*- encoding: utf-8 -*-

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
-
 import io
+import unittest

 from isso import config
@@ -1,10 +1,8 @@
 # -*- encoding: utf-8 -*-

 from __future__ import unicode_literals

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest

 from werkzeug.test import Client
 from werkzeug.wrappers import Response
@@ -1,28 +1,48 @@
 # -*- encoding: utf-8 -*-

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+from __future__ import unicode_literals

-import os
-import sqlite3
-import tempfile
+import unittest

 from isso import config
-from isso.db import SQLite3
+from isso.db import SQLite3, Adapter

 from isso.compat import iteritems


+class TestSQLite3(unittest.TestCase):
+
+    def test_connection(self):
+        con = SQLite3(":memory:")
+
+        con.connect()
+        self.assertTrue(hasattr(con.local, "conn"))
+
+        con.close()
+        self.assertIsNone(con.local.conn)
+
+    def test_autoconnect(self):
+        con = SQLite3(":memory:")
+        con.execute("")
+        self.assertTrue(hasattr(con.local, "conn"))
+
+    def test_rollback(self):
+        con = SQLite3(":memory:")
+        con.execute("CREATE TABLE foo (bar INTEGER)")
+        con.execute("INSERT INTO foo (bar) VALUES (42)")
+
+        try:
+            with con.transaction as con:
+                con.execute("INSERT INTO foo (bar) VALUES (23)")
+                raise ValueError("some error")
+        except ValueError:
+            pass
+
+        self.assertEqual(len(con.execute("SELECT bar FROM foo").fetchall()), 1)
+
+
 class TestDBMigration(unittest.TestCase):

-    def setUp(self):
-        fd, self.path = tempfile.mkstemp()
-
-    def tearDown(self):
-        os.unlink(self.path)
-
     def test_defaults(self):

         conf = config.new({
@@ -31,9 +51,9 @@ class TestDBMigration(unittest.TestCase):
                 "max-age": "1h"
             }
         })
-        db = SQLite3(self.path, conf)
+        db = Adapter(SQLite3(":memory:"), conf)

-        self.assertEqual(db.version, SQLite3.MAX_VERSION)
+        self.assertEqual(db.version, Adapter.MAX_VERSION)
         self.assertTrue(db.preferences.get("session-key", "").isalnum())

     def test_session_key_migration(self):
@@ -46,21 +66,23 @@ class TestDBMigration(unittest.TestCase):
         })
         conf.set("general", "session-key", "supersecretkey")

-        with sqlite3.connect(self.path) as con:
+        connection = SQLite3(":memory:")
+
+        with connection.transaction as con:
             con.execute("PRAGMA user_version = 1")
             con.execute("CREATE TABLE threads (id INTEGER PRIMARY KEY)")

-        db = SQLite3(self.path, conf)
+        db = Adapter(connection, conf)

-        self.assertEqual(db.version, SQLite3.MAX_VERSION)
+        self.assertEqual(db.version, Adapter.MAX_VERSION)
         self.assertEqual(db.preferences.get("session-key"),
                          conf.get("general", "session-key"))

         # try again, now with the session-key removed from our conf
         conf.remove_option("general", "session-key")
-        db = SQLite3(self.path, conf)
+        db = Adapter(connection, conf)

-        self.assertEqual(db.version, SQLite3.MAX_VERSION)
+        self.assertEqual(db.version, Adapter.MAX_VERSION)
         self.assertEqual(db.preferences.get("session-key"),
                          "supersecretkey")

@@ -76,7 +98,9 @@ class TestDBMigration(unittest.TestCase):
             6: None
         }

-        with sqlite3.connect(self.path) as con:
+        connection = SQLite3(":memory:")
+
+        with connection.transaction as con:
             con.execute("PRAGMA user_version = 2")
             con.execute("CREATE TABLE threads ("
                         " id INTEGER PRIMARY KEY,"
@@ -106,7 +130,7 @@ class TestDBMigration(unittest.TestCase):
                 "max-age": "1h"
             }
         })
-        SQLite3(self.path, conf)
+        Adapter(connection, conf)

         flattened = [
             (1, None),
@@ -118,6 +142,6 @@ class TestDBMigration(unittest.TestCase):
             (7, 2)
         ]

-        with sqlite3.connect(self.path) as con:
+        with connection.transaction as con:
             rv = con.execute("SELECT id, parent FROM comments ORDER BY created").fetchall()
         self.assertEqual(flattened, rv)
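The migration tests above are also the clearest illustration of the new split between the raw connection and the schema adapter. A minimal sketch, assuming only the API exercised by these tests (SQLite3 as a thread-local connection wrapper with execute/connect/close and a transaction context manager, and Adapter(connection, conf) running the migrations); defaults.ini is the bundled configuration referenced elsewhere in this branch:

import os

from isso import config, dist
from isso.db import SQLite3, Adapter

connection = SQLite3(":memory:")
connection.execute("SELECT 1")       # auto-connects on first use

with connection.transaction as con:  # rolled back if the block raises
    con.execute("CREATE TABLE foo (bar INTEGER)")

conf = config.load(os.path.join(dist.location, "isso", "defaults.ini"))
db = Adapter(connection, conf)       # migrates the schema up to Adapter.MAX_VERSION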
@ -1,115 +0,0 @@
|
|||||||
# -*- encoding: utf-8 -*-
|
|
||||||
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
try:
|
|
||||||
import unittest2 as unittest
|
|
||||||
except ImportError:
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
import os
|
|
||||||
import json
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
from werkzeug import __version__
|
|
||||||
from werkzeug.test import Client
|
|
||||||
from werkzeug.wrappers import Response
|
|
||||||
|
|
||||||
from isso import Isso, config, core, dist
|
|
||||||
from isso.utils import http
|
|
||||||
|
|
||||||
from fixtures import curl, FakeIP
|
|
||||||
http.curl = curl
|
|
||||||
|
|
||||||
if __version__.startswith("0.8"):
|
|
||||||
class Response(Response):
|
|
||||||
|
|
||||||
def get_data(self, as_text=False):
|
|
||||||
return self.data.decode("utf-8")
|
|
||||||
|
|
||||||
|
|
||||||
class TestGuard(unittest.TestCase):
|
|
||||||
|
|
||||||
data = json.dumps({"text": "Lorem ipsum."})
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.path = tempfile.NamedTemporaryFile().name
|
|
||||||
|
|
||||||
def makeClient(self, ip, ratelimit=2, direct_reply=3, self_reply=False):
|
|
||||||
|
|
||||||
conf = config.load(os.path.join(dist.location, "share", "isso.conf"))
|
|
||||||
conf.set("general", "dbpath", self.path)
|
|
||||||
conf.set("hash", "algorithm", "none")
|
|
||||||
conf.set("guard", "enabled", "true")
|
|
||||||
conf.set("guard", "ratelimit", str(ratelimit))
|
|
||||||
conf.set("guard", "direct-reply", str(direct_reply))
|
|
||||||
conf.set("guard", "reply-to-self", "1" if self_reply else "0")
|
|
||||||
|
|
||||||
class App(Isso, core.Mixin):
|
|
||||||
pass
|
|
||||||
|
|
||||||
app = App(conf)
|
|
||||||
app.wsgi_app = FakeIP(app.wsgi_app, ip)
|
|
||||||
|
|
||||||
return Client(app, Response)
|
|
||||||
|
|
||||||
def testRateLimit(self):
|
|
||||||
|
|
||||||
bob = self.makeClient("127.0.0.1", 2)
|
|
||||||
|
|
||||||
for i in range(2):
|
|
||||||
rv = bob.post('/new?uri=test', data=self.data)
|
|
||||||
self.assertEqual(rv.status_code, 201)
|
|
||||||
|
|
||||||
rv = bob.post('/new?uri=test', data=self.data)
|
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 403)
|
|
||||||
self.assertIn("ratelimit exceeded", rv.get_data(as_text=True))
|
|
||||||
|
|
||||||
alice = self.makeClient("1.2.3.4", 2)
|
|
||||||
for i in range(2):
|
|
||||||
self.assertEqual(alice.post("/new?uri=test", data=self.data).status_code, 201)
|
|
||||||
|
|
||||||
bob.application.db.execute([
|
|
||||||
"UPDATE comments SET",
|
|
||||||
" created = created - 60",
|
|
||||||
"WHERE remote_addr = '127.0.0.0'"
|
|
||||||
])
|
|
||||||
|
|
||||||
self.assertEqual(bob.post("/new?uri=test", data=self.data).status_code, 201)
|
|
||||||
|
|
||||||
def testDirectReply(self):
|
|
||||||
|
|
||||||
client = self.makeClient("127.0.0.1", 15, 3)
|
|
||||||
|
|
||||||
for url in ("foo", "bar", "baz", "spam"):
|
|
||||||
for _ in range(3):
|
|
||||||
rv = client.post("/new?uri=%s" % url, data=self.data)
|
|
||||||
self.assertEqual(rv.status_code, 201)
|
|
||||||
|
|
||||||
for url in ("foo", "bar", "baz", "spam"):
|
|
||||||
rv = client.post("/new?uri=%s" % url, data=self.data)
|
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 403)
|
|
||||||
self.assertIn("direct responses to", rv.get_data(as_text=True))
|
|
||||||
|
|
||||||
def testSelfReply(self):
|
|
||||||
|
|
||||||
payload = lambda id: json.dumps({"text": "...", "parent": id})
|
|
||||||
|
|
||||||
client = self.makeClient("127.0.0.1", self_reply=False)
|
|
||||||
self.assertEqual(client.post("/new?uri=test", data=self.data).status_code, 201)
|
|
||||||
self.assertEqual(client.post("/new?uri=test", data=payload(1)).status_code, 403)
|
|
||||||
|
|
||||||
client.application.db.execute([
|
|
||||||
"UPDATE comments SET",
|
|
||||||
" created = created - ?",
|
|
||||||
"WHERE id = 1"
|
|
||||||
], (client.application.conf.getint("general", "max-age"), ))
|
|
||||||
|
|
||||||
self.assertEqual(client.post("/new?uri=test", data=payload(1)).status_code, 201)
|
|
||||||
|
|
||||||
client = self.makeClient("128.0.0.1", ratelimit=3, self_reply=False)
|
|
||||||
self.assertEqual(client.post("/new?uri=test", data=self.data).status_code, 201)
|
|
||||||
self.assertEqual(client.post("/new?uri=test", data=payload(1)).status_code, 201)
|
|
||||||
self.assertEqual(client.post("/new?uri=test", data=payload(2)).status_code, 201)
|
|
@@ -1,13 +1,8 @@
 # -*- encoding: utf-8 -*-

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
-
 import textwrap
+import unittest

-from isso import config
 from isso.utils import html


@@ -87,13 +82,6 @@ class TestHTML(unittest.TestCase):
             self.assertEqual(html.sanitize(sanitizer, input), expected)

     def test_render(self):
-        conf = config.new({
-            "markup": {
-                "options": "autolink",
-                "allowed-elements": "",
-                "allowed-attributes": ""
-            }
-        })
-        renderer = html.Markup(conf.section("markup")).render
+        renderer = html.Markup(["autolink", ]).render
         self.assertEqual(renderer("http://example.org/ and sms:+1234567890"),
                          '<p><a href="http://example.org/">http://example.org/</a> and sms:+1234567890</p>')
@@ -2,17 +2,12 @@

 from __future__ import unicode_literals

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest

-import tempfile
 from os.path import join, dirname

 from isso import config

-from isso.db import SQLite3
+from isso.db import SQLite3, Adapter
 from isso.migrate import Disqus, WordPress, autodetect

 conf = config.new({
@@ -28,9 +23,8 @@ class TestMigration(unittest.TestCase):
     def test_disqus(self):

         xml = join(dirname(__file__), "disqus.xml")
-        xxx = tempfile.NamedTemporaryFile()

-        db = SQLite3(xxx.name, conf)
+        db = Adapter(SQLite3(":memory:"), conf)
         Disqus(db, xml).migrate()

         self.assertEqual(len(db.execute("SELECT id FROM comments").fetchall()), 2)
@@ -50,9 +44,8 @@ class TestMigration(unittest.TestCase):
     def test_wordpress(self):

         xml = join(dirname(__file__), "wordpress.xml")
-        xxx = tempfile.NamedTemporaryFile()

-        db = SQLite3(xxx.name, conf)
+        db = Adapter(SQLite3(":memory:"), conf)
         WordPress(db, xml).migrate()

         self.assertEqual(db.threads["/2014/test/"]["title"], "Hello, World…")
isso/tests/test_queue.py (new file, 103 lines)

# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import unittest
import datetime

from isso.db import SQLite3
from isso.queue import Message, Queue, Full, Empty, Timeout, SQLite3Queue


class TestMessage(unittest.TestCase):

    def test_message(self):
        a = Message("Foo", None)
        b = Message("Bar", None)

        self.assertLess(a, b)

    def test_message_delay(self):
        a = Message("Foo", None, delay=1)
        b = Message("Bar", None, delay=0)

        self.assertGreater(a, b)

    def test_message_wait(self):
        a = Message("Foo", None)
        b = Message("Foo", None)
        a = Queue.delay(a, 1, delayfunc=lambda i: 0.5)

        self.assertGreater(a, b)


class TestQueue(unittest.TestCase):

    def setUp(self):
        self.cls = Queue

    def test_queue(self):
        q = self.cls()
        msgs = [Message("Foo", None) for _ in range(3)]

        for msg in msgs:
            q.put(msg)

        self.assertEqual(q.size, 3)

        for msg in msgs:
            self.assertEqual(q.get(), msg)

        self.assertEqual(q.size, 0)

    def test_data_primitives(self):
        q = self.cls()
        m = Message("Foo", {"foo": True, "bar": [2, 3]})

        q.put(m)
        self.assertEqual(q.get(), m)

    def test_queue_full(self):
        q = self.cls(maxlen=1)
        q.put(Message("Foo", None))

        self.assertRaises(Full, q.put, Message("Bar", None))

    def test_queue_empty(self):
        q = self.cls()
        msg = Message("Foo", None)

        self.assertRaises(Empty, q.get)
        q.put(msg)
        q.get()
        self.assertRaises(Empty, q.get)

    def test_retry(self):
        q = self.cls()
        msg = Message("Foo", None)

        q.retry(msg)
        self.assertRaises(Empty, q.get)
        self.assertEqual(q.size, 1)

    def test_retry_timeout(self):
        q = self.cls(timeout=0)
        msg = Message("Foo", None)

        self.assertRaises(Timeout, q.retry, msg)

    def test_requeue(self):
        q = self.cls()
        msg = Message("Foo", None)

        q.put(msg)
        q.requeue(q.get(), datetime.timedelta(seconds=1))

        self.assertRaises(Empty, q.get)
        self.assertEqual(q.size, 1)


class TestSQLite3Queue(TestQueue):

    def setUp(self):
        self.cls = lambda *x, **z: SQLite3Queue(SQLite3(":memory:"), *x, **z)
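These tests are the only usage reference for the new queue module in this branch. A minimal sketch, assuming exactly the Message/Queue interface exercised above (put/get/requeue, a size property, and the Empty exception); nothing beyond that is documented in the diff:

import datetime

from isso.queue import Message, Queue, Empty

q = Queue(maxlen=32)                      # bounded, in-memory queue
q.put(Message("comment.new", {"id": 1}))  # a type string plus a JSON-serializable payload

try:
    msg = q.get()                         # raises Empty when nothing is ready
    # hand the message to a worker; on failure, put it back with a delay
    q.requeue(msg, datetime.timedelta(seconds=1))
except Empty:
    pass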
isso/tests/test_spam.py (new file, 92 lines)

# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import unittest

from isso import db, spam
from isso.models import Thread
from isso.controllers import comments

bob = "127.0.0.1"
alice = "127.0.0.2"

comment = dict(text="...")


class TestGuard(unittest.TestCase):

    def setUp(self):
        self.db = db.Adapter("sqlite:///:memory:")

    def test_ratelimit(self):
        thread = Thread(0, "/")

        guard = spam.Guard(self.db, enabled=True, ratelimit=2)
        controller = comments.Controller(self.db, guard)

        for _ in range(2):
            controller.new(bob, thread, comment)

        try:
            controller.new(bob, thread, comment)
        except Exception as ex:
            self.assertIsInstance(ex, comments.Denied)
            self.assertIn("ratelimit exceeded", ex.args[0])
        else:
            self.assertTrue(False)

        # check if Alice can still comment
        for _ in range(2):
            controller.new(alice, thread, comment)

        # 60 seconds are gone now
        self.db.engine.execute(
            self.db.comments.update().values(
                created=self.db.comments.c.created - 60
            ).where(self.db.comments.c.remote_addr == bob))

        controller.new(bob, thread, comment)

    def test_direct_reply(self):
        threads = [Thread(0, "/foo"), Thread(1, "/bar")]

        guard = spam.Guard(self.db, enabled=True, ratelimit=-1, direct_reply=3)
        controller = comments.Controller(self.db, guard)

        for thread in threads:
            for _ in range(3):
                controller.new(bob, thread, comment)

        for thread in threads:
            try:
                controller.new(bob, thread, comment)
            except Exception as ex:
                self.assertIsInstance(ex, comments.Denied)
                self.assertIn("direct response", ex.args[0])
            else:
                self.assertTrue(False)

    def test_self_reply(self):
        thread = Thread(0, "/")

        guard = spam.Guard(self.db, enabled=True, reply_to_self=False)
        controller = comments.Controller(self.db, guard)

        ref = controller.new(bob, thread, comment).id

        try:
            controller.new(bob, thread, dict(text="...", parent=ref))
        except Exception as ex:
            self.assertIsInstance(ex, comments.Denied)
            self.assertIn("editing frame is still open", ex.args[0])
        else:
            self.assertTrue(False)

        # fast-forward `max-age` seconds
        self.db.engine.execute(
            self.db.comments.update().values(
                created=self.db.comments.c.created - guard.max_age
            ).where(self.db.comments.c.id == ref))

        controller.new(bob, thread, dict(text="...", parent=ref))
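These tests show how the guard is now wired into the comments controller instead of the WSGI layer. A minimal sketch built only from the calls above (db.Adapter, spam.Guard, comments.Controller and comments.Denied are this branch's API, not the released isso interface):

from isso import db, spam
from isso.models import Thread
from isso.controllers import comments

database = db.Adapter("sqlite:///:memory:")
guard = spam.Guard(database, enabled=True, ratelimit=2, direct_reply=3)
controller = comments.Controller(database, guard)

try:
    controller.new("127.0.0.1", Thread(0, "/"), dict(text="Hello"))
except comments.Denied as exc:
    print(exc.args[0])  # e.g. "ratelimit exceeded" or a direct-reply rejection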
@@ -1,10 +1,8 @@
 # -*- encoding: utf-8 -*-

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
+
+import itsdangerous

 from isso import utils
 from isso.utils import parse
@@ -23,6 +21,22 @@ class TestUtils(unittest.TestCase):
             self.assertEqual(utils.anonymize(addr), anonymized)


+class TestURLSafeTimedSerializer(unittest.TestCase):
+
+    def test_serializer(self):
+        signer = utils.URLSafeTimedSerializer("")
+        payload = [1, "x" * 1024]
+        self.assertEqual(signer.loads(signer.dumps(payload)), payload)
+
+    def test_nocompression(self):
+        plain = utils.URLSafeTimedSerializer("")
+        zlib = itsdangerous.URLSafeTimedSerializer("")
+
+        payload = "x" * 1024
+        self.assertTrue(zlib.dumps(payload).startswith("."))
+        self.assertNotEqual(plain.dumps(payload), zlib.dumps(payload))
+
+
 class TestParse(unittest.TestCase):

     def test_thread(self):
@@ -2,12 +2,7 @@

 from __future__ import unicode_literals

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
-
-from isso import config
+import unittest

 from isso.compat import PY2K, string_types
 from isso.utils.hash import Hash, PBKDF2, new
@@ -49,25 +44,16 @@ class TestCreate(unittest.TestCase):

     def test_custom(self):

-        def _new(val):
-            conf = config.new({
-                "hash": {
-                    "algorithm": val,
-                    "salt": ""
-                }
-            })
-            return new(conf.section("hash"))
-
-        sha1 = _new("sha1")
+        sha1 = new("sha1")
         self.assertIsInstance(sha1, Hash)
         self.assertEqual(sha1.func, "sha1")
-        self.assertRaises(ValueError, _new, "foo")
+        self.assertRaises(ValueError, new, "foo")

-        pbkdf2 = _new("pbkdf2:16")
+        pbkdf2 = new("pbkdf2:16")
         self.assertIsInstance(pbkdf2, PBKDF2)
         self.assertEqual(pbkdf2.iterations, 16)

-        pbkdf2 = _new("pbkdf2:16:2:md5")
+        pbkdf2 = new("pbkdf2:16:2:md5")
         self.assertIsInstance(pbkdf2, PBKDF2)
         self.assertEqual(pbkdf2.dklen, 2)
         self.assertEqual(pbkdf2.func, "md5")
isso/tests/test_utils_types.py (new file, 24 lines)

# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import unittest

from isso.compat import string_types
from isso.utils import types


class TestTypes(unittest.TestCase):

    def test_require(self):

        try:
            types.require("foo", string_types)
        except TypeError:
            self.assertTrue(False)

    def test_require_raises(self):

        self.assertRaises(TypeError, types.require, 1, bool)
        self.assertRaises(TypeError, types.require, 1, str)
@ -1,96 +0,0 @@
|
|||||||
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import os
|
|
||||||
import json
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
try:
|
|
||||||
import unittest2 as unittest
|
|
||||||
except ImportError:
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
from werkzeug.wrappers import Response
|
|
||||||
|
|
||||||
from isso import Isso, core, config, dist
|
|
||||||
from isso.utils import http
|
|
||||||
|
|
||||||
from fixtures import curl, loads, FakeIP, JSONClient
|
|
||||||
http.curl = curl
|
|
||||||
|
|
||||||
|
|
||||||
class TestVote(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.path = tempfile.NamedTemporaryFile().name
|
|
||||||
|
|
||||||
def makeClient(self, ip):
|
|
||||||
|
|
||||||
conf = config.load(os.path.join(dist.location, "share", "isso.conf"))
|
|
||||||
conf.set("general", "dbpath", self.path)
|
|
||||||
conf.set("guard", "enabled", "off")
|
|
||||||
conf.set("hash", "algorithm", "none")
|
|
||||||
|
|
||||||
class App(Isso, core.Mixin):
|
|
||||||
pass
|
|
||||||
|
|
||||||
app = App(conf)
|
|
||||||
app.wsgi_app = FakeIP(app.wsgi_app, ip)
|
|
||||||
|
|
||||||
return JSONClient(app, Response)
|
|
||||||
|
|
||||||
def testZeroLikes(self):
|
|
||||||
|
|
||||||
rv = self.makeClient("127.0.0.1").post("/new?uri=test", data=json.dumps({"text": "..."}))
|
|
||||||
self.assertEqual(loads(rv.data)['likes'], 0)
|
|
||||||
self.assertEqual(loads(rv.data)['dislikes'], 0)
|
|
||||||
|
|
||||||
def testSingleLike(self):
|
|
||||||
|
|
||||||
self.makeClient("127.0.0.1").post("/new?uri=test", data=json.dumps({"text": "..."}))
|
|
||||||
rv = self.makeClient("0.0.0.0").post("/id/1/like")
|
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 200)
|
|
||||||
self.assertEqual(loads(rv.data)["likes"], 1)
|
|
||||||
|
|
||||||
def testSelfLike(self):
|
|
||||||
|
|
||||||
bob = self.makeClient("127.0.0.1")
|
|
||||||
bob.post("/new?uri=test", data=json.dumps({"text": "..."}))
|
|
||||||
rv = bob.post('/id/1/like')
|
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 200)
|
|
||||||
self.assertEqual(loads(rv.data)["likes"], 0)
|
|
||||||
|
|
||||||
def testMultipleLikes(self):
|
|
||||||
|
|
||||||
self.makeClient("127.0.0.1").post("/new?uri=test", data=json.dumps({"text": "..."}))
|
|
||||||
for num in range(15):
|
|
||||||
rv = self.makeClient("1.2.%i.0" % num).post('/id/1/like')
|
|
||||||
self.assertEqual(rv.status_code, 200)
|
|
||||||
self.assertEqual(loads(rv.data)["likes"], num + 1)
|
|
||||||
|
|
||||||
def testVoteOnNonexistentComment(self):
|
|
||||||
rv = self.makeClient("1.2.3.4").post('/id/1/like')
|
|
||||||
self.assertEqual(rv.status_code, 200)
|
|
||||||
self.assertEqual(loads(rv.data), None)
|
|
||||||
|
|
||||||
def testTooManyLikes(self):
|
|
||||||
|
|
||||||
self.makeClient("127.0.0.1").post("/new?uri=test", data=json.dumps({"text": "..."}))
|
|
||||||
for num in range(256):
|
|
||||||
rv = self.makeClient("1.2.%i.0" % num).post('/id/1/like')
|
|
||||||
self.assertEqual(rv.status_code, 200)
|
|
||||||
|
|
||||||
if num >= 142:
|
|
||||||
self.assertEqual(loads(rv.data)["likes"], 142)
|
|
||||||
else:
|
|
||||||
self.assertEqual(loads(rv.data)["likes"], num + 1)
|
|
||||||
|
|
||||||
def testDislike(self):
|
|
||||||
self.makeClient("127.0.0.1").post("/new?uri=test", data=json.dumps({"text": "..."}))
|
|
||||||
rv = self.makeClient("1.2.3.4").post('/id/1/dislike')
|
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 200)
|
|
||||||
self.assertEqual(loads(rv.data)['likes'], 0)
|
|
||||||
self.assertEqual(loads(rv.data)['dislikes'], 1)
|
|
@@ -1,10 +1,6 @@
 # -*- encoding: utf-8 -*-

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest

 from isso import wsgi
175
isso/tests/views/test_api.py
Normal file
175
isso/tests/views/test_api.py
Normal file
@ -0,0 +1,175 @@
|
|||||||
|
# -*- encoding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from werkzeug.test import Client
|
||||||
|
from werkzeug.wrappers import Response
|
||||||
|
|
||||||
|
from isso import Isso, config, dist
|
||||||
|
from isso.utils import http
|
||||||
|
|
||||||
|
|
||||||
|
class FakeIP(object):
|
||||||
|
|
||||||
|
def __init__(self, app, ip):
|
||||||
|
self.app = app
|
||||||
|
self.ip = ip
|
||||||
|
|
||||||
|
def __call__(self, environ, start_response):
|
||||||
|
environ['REMOTE_ADDR'] = self.ip
|
||||||
|
return self.app(environ, start_response)
|
||||||
|
|
||||||
|
|
||||||
|
class JSONClient(Client):
|
||||||
|
|
||||||
|
def open(self, *args, **kwargs):
|
||||||
|
kwargs.setdefault('content_type', 'application/json')
|
||||||
|
return super(JSONClient, self).open(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class Dummy(object):
|
||||||
|
|
||||||
|
status = 200
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
return self
|
||||||
|
|
||||||
|
def read(self):
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
curl = lambda method, host, path: Dummy()
|
||||||
|
loads = lambda data: json.loads(data.decode('utf-8'))
|
||||||
|
|
||||||
|
http.curl = curl
|
||||||
|
|
||||||
|
|
||||||
|
class TestComments(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
conf = config.load(os.path.join(dist.location, "isso", "defaults.ini"))
|
||||||
|
conf.set("general", "dbpath", "sqlite:///:memory:")
|
||||||
|
conf.set("guard", "enabled", "off")
|
||||||
|
conf.set("hash", "algorithm", "none")
|
||||||
|
|
||||||
|
self.app = Isso(conf)
|
||||||
|
self.app.wsgi_app = FakeIP(self.app.wsgi_app, "192.168.1.1")
|
||||||
|
|
||||||
|
self.client = JSONClient(self.app, Response)
|
||||||
|
self.get = self.client.get
|
||||||
|
self.put = self.client.put
|
||||||
|
self.post = self.client.post
|
||||||
|
self.delete = self.client.delete
|
||||||
|
|
||||||
|
# done (except Markup)
|
||||||
|
def testGet(self):
|
||||||
|
|
||||||
|
self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Lorem ipsum ...'}))
|
||||||
|
r = self.get('/id/1')
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
rv = loads(r.data)
|
||||||
|
|
||||||
|
self.assertEqual(rv['id'], 1)
|
||||||
|
self.assertEqual(rv['text'], '<p>Lorem ipsum ...</p>')
|
||||||
|
|
||||||
|
# done (except Set-Cookie)
|
||||||
|
def testCreate(self):
|
||||||
|
|
||||||
|
rv = self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Lorem ipsum ...'}))
|
||||||
|
|
||||||
|
self.assertEqual(rv.status_code, 201)
|
||||||
|
self.assertIn("Set-Cookie", rv.headers)
|
||||||
|
self.assertIn("X-Set-Cookie", rv.headers)
|
||||||
|
|
||||||
|
rv = loads(rv.data)
|
||||||
|
|
||||||
|
self.assertEqual(rv["mode"], 1)
|
||||||
|
self.assertEqual(rv["text"], '<p>Lorem ipsum ...</p>')
|
||||||
|
|
||||||
|
def testGetLimited(self):
|
||||||
|
|
||||||
|
for i in range(20):
|
||||||
|
self.post('/new?uri=test', data=json.dumps({'text': '...'}))
|
||||||
|
|
||||||
|
r = self.get('/?uri=test&limit=10')
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
rv = loads(r.data)
|
||||||
|
self.assertEqual(len(rv['replies']), 10)
|
||||||
|
|
||||||
|
def testGetNested(self):
|
||||||
|
|
||||||
|
self.post('/new?uri=test', data=json.dumps({'text': '...'}))
|
||||||
|
self.post('/new?uri=test', data=json.dumps({'text': '...', 'parent': 1}))
|
||||||
|
|
||||||
|
r = self.get('/?uri=test&parent=1')
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
rv = loads(r.data)
|
||||||
|
self.assertEqual(len(rv['replies']), 1)
|
||||||
|
|
||||||
|
def testGetLimitedNested(self):
|
||||||
|
|
||||||
|
self.post('/new?uri=test', data=json.dumps({'text': '...'}))
|
||||||
|
for i in range(20):
|
||||||
|
self.post('/new?uri=test', data=json.dumps({'text': '...', 'parent': 1}))
|
||||||
|
|
||||||
|
r = self.get('/?uri=test&parent=1&limit=10')
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
rv = loads(r.data)
|
||||||
|
self.assertEqual(len(rv['replies']), 10)
|
||||||
|
|
||||||
|
# done (except plain)
|
||||||
|
def testUpdate(self):
|
||||||
|
|
||||||
|
self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Lorem ipsum ...'}))
|
||||||
|
self.put('/id/1', data=json.dumps({
|
||||||
|
'text': 'Hello World', 'author': 'me', 'website': 'http://example.com/'}))
|
||||||
|
|
||||||
|
r = self.get('/id/1?plain=1')
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
rv = loads(r.data)
|
||||||
|
self.assertEqual(rv['text'], 'Hello World')
|
||||||
|
self.assertEqual(rv['author'], 'me')
|
||||||
|
self.assertEqual(rv['website'], 'http://example.com/')
|
||||||
|
self.assertIn('modified', rv)
|
||||||
|
|
||||||
|
def testDeleteAndCreateByDifferentUsersButSamePostId(self):
|
||||||
|
|
||||||
|
mallory = JSONClient(self.app, Response)
|
||||||
|
mallory.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Foo'}))
|
||||||
|
mallory.delete('/id/1')
|
||||||
|
|
||||||
|
bob = JSONClient(self.app, Response)
|
||||||
|
bob.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Bar'}))
|
||||||
|
|
||||||
|
self.assertEqual(mallory.delete('/id/1').status_code, 403)
|
||||||
|
self.assertEqual(bob.delete('/id/1').status_code, 200)
|
||||||
|
|
||||||
|
def testCSRF(self):
|
||||||
|
|
||||||
|
js = "application/json"
|
||||||
|
form = "application/x-www-form-urlencoded"
|
||||||
|
|
||||||
|
self.post('/new?uri=%2F', data=json.dumps({"text": "..."}))
|
||||||
|
|
||||||
|
# no header is fine (default for XHR)
|
||||||
|
self.assertEqual(self.post('/id/1/dislike', content_type="").status_code, 200)
|
||||||
|
|
||||||
|
# x-www-form-urlencoded is definitely not RESTful
|
||||||
|
self.assertEqual(self.post('/id/1/dislike', content_type=form).status_code, 403)
|
||||||
|
self.assertEqual(self.post('/new?uri=%2F', data=json.dumps({"text": "..."}),
|
||||||
|
content_type=form).status_code, 403)
|
||||||
|
# just for the record
|
||||||
|
self.assertEqual(self.post('/id/1/dislike', content_type=js).status_code, 200)
|
@@ -6,8 +6,11 @@ import pkg_resources
 werkzeug = pkg_resources.get_distribution("werkzeug")

 import json
+import base64
 import hashlib

+from itsdangerous import BadPayload, TimedSerializer, compact_json
+
 from werkzeug.wrappers import Request, Response
 from werkzeug.exceptions import BadRequest

@@ -33,7 +36,7 @@ def anonymize(remote_addr):
     return u'' + ipv6.exploded.rsplit(':', 5)[0] + ':' + ':'.join(['0000']*5)


-class Bloomfilter:
+class Bloomfilter(object):
     """A space-efficient probabilistic data structure. False-positive rate:

     * 1e-05 for <80 elements
@@ -46,15 +49,13 @@ class Bloomfilter:

     >>> bf = Bloomfilter()
     >>> bf.add("127.0.0.1")
-    >>> not any(map(bf.__contains__, ("1.2.%i.4" for i in range(256))))
+    >>> not any(addr in bf for addr in ("1.2.%i.4" for i in range(256)))
     True

     >>> bf = Bloomfilter()
     >>> for i in range(256):
     ...     bf.add("1.2.%i.4" % i)
     ...
-    >>> len(bf)
-    256
     >>> "1.2.3.4" in bf
     True
     >>> "127.0.0.1" in bf
@@ -64,9 +65,8 @@ class Bloomfilter:
     http://code.activestate.com/recipes/577684-bloom-filter/
     """

-    def __init__(self, array=None, elements=0, iterable=()):
+    def __init__(self, array=None, iterable=()):
         self.array = array or bytearray(256)
-        self.elements = elements
         self.k = 11
         self.m = len(self.array) * 8

@@ -82,13 +82,9 @@ class Bloomfilter:
     def add(self, key):
         for i in self.get_probes(key):
             self.array[i//8] |= 2 ** (i%8)
-        self.elements += 1

     def __contains__(self, key):
-        return all(self.array[i//8] & (2 ** (i%8)) for i in self.get_probes(key))
-
-    def __len__(self):
-        return self.elements
+        return all(self.array[i//8] & (2 ** (i % 8)) for i in self.get_probes(key))


 class JSONRequest(Request):
@@ -110,3 +106,21 @@ class JSONResponse(Response):
         kwargs["content_type"] = "application/json"
         super(JSONResponse, self).__init__(
             json.dumps(obj).encode("utf-8"), *args, **kwargs)
+
+
+class URLSafeTimedSerializer(TimedSerializer):
+
+    default_serializer = compact_json
+
+    def load_payload(self, payload):
+        try:
+            json = base64.b64decode(payload + b"=" * (-len(payload) % 4))
+        except Exception as e:
+            raise BadPayload('Could not base64 decode the payload because of '
+                             'an exception', original_error=e)
+
+        return super(TimedSerializer, self).load_payload(json)
+
+    def dump_payload(self, obj):
+        json = super(TimedSerializer, self).dump_payload(obj)
+        return base64.b64encode(json).rstrip(b"=")
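The subclass above replaces itsdangerous' default URL-safe payload handling (which may zlib-compress and prefix the token with a dot) with plain base64 over compact JSON, which is exactly what the new test_nocompression test checks. A short usage sketch, using only the standard TimedSerializer API plus the class defined here; the secret key is a placeholder:

from isso.utils import URLSafeTimedSerializer

signer = URLSafeTimedSerializer("placeholder secret")

token = signer.dumps([1, "x" * 1024])      # signed base64(compact JSON), never "."-prefixed
print(signer.loads(token, max_age=3600))   # [1, 'xx...']; raises if expired or tampered with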
@@ -5,6 +5,7 @@ from __future__ import unicode_literals
 import codecs
 import hashlib

+from isso.utils import types
 from isso.compat import string_types, text_type as str

 try:
@@ -20,11 +21,6 @@ except ImportError:
                       "to `werkzeug` 0.9 or install `passlib`.")


-def _TypeError(name, expected, val):
-    return TypeError("'{0}' must be {1}, not {2}".format(
-        name, expected, val.__class__.__name__))
-
-
 class Hash(object):

     func = None
@@ -37,29 +33,25 @@ class Hash(object):
         self.func = func

         if salt is not None:
-            if not isinstance(salt, bytes):
-                raise _TypeError("salt", "bytes", salt)
+            types.require(salt, bytes)
         self.salt = salt

     def hash(self, val):
-        """Calculate hash from value (must be bytes)."""
-        if not isinstance(val, bytes):
-            raise _TypeError("val", "bytes", val)
+        """Calculate hash from value (must be bytes).
+        """
+        types.require(val, bytes)

         rv = self.compute(val)
-
-        if not isinstance(val, bytes):
-            raise _TypeError("val", "bytes", rv)
+        types.require(rv, bytes)

         return rv

     def uhash(self, val):
-        """Calculate hash from unicode value and return hex value as unicode"""
-        if not isinstance(val, string_types):
-            raise _TypeError("val", "str", val)
-
+        """Calculate hash from unicode value and return hex value as unicode
+        """
+        types.require(val, string_types)
         return codecs.encode(self.hash(val.encode("utf-8")), "hex_codec").decode("utf-8")

     def compute(self, val):
@@ -85,13 +77,14 @@ class PBKDF2(Hash):
         return pbkdf2(val, self.salt, self.iterations, self.dklen, self.func)


-def new(conf):
+def new(algorithm, salt=None):
     """Factory to create hash functions from configuration section. If an
     algorithm takes custom parameters, you can separate them by a colon like
     this: pbkdf2:arg1:arg2:arg3."""

-    algorithm = conf.get("algorithm")
-    salt = conf.get("salt").encode("utf-8")
+    if salt is None:
+        salt = ''
+    salt = salt.encode("utf-8")

     if algorithm == "none":
         return Hash(salt, None)
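With the factory decoupled from the configuration object, a hash function is now built straight from the algorithm string plus an optional salt. A small sketch mirroring the calls in the hash tests; the salt value here is a placeholder:

from isso.utils.hash import new

pbkdf2 = new("pbkdf2:16")                  # optional parameters follow the colon
print(pbkdf2.uhash("test@example.org"))    # hex digest as unicode

sha1 = new("sha1", salt="placeholder")     # salt defaults to the empty string
print(sha1.uhash("127.0.0.1"))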
@@ -28,10 +28,15 @@ def Sanitizer(elements, attributes):
             "pre", "code", "blockquote",
             "del", "ins", "strong", "em",
             "h1", "h2", "h3", "h4", "h5", "h6",
-            "table", "thead", "tbody", "th", "td"] + elements
+            "table", "thead", "tbody", "th", "td"]

         # href for <a> and align for <table>
-        allowed_attributes = ["align", "href"] + attributes
+        allowed_attributes = ["align", "href"]
+
+        def __init__(self, *args, **kwargs):
+            super(Inner, self).__init__(*args, **kwargs)
+            self.allowed_elements = Inner.allowed_elements + elements
+            self.allowed_attributes = Inner.allowed_attributes + attributes

         # remove disallowed tokens from the output
         def disallowed_token(self, token, token_type):
@@ -81,13 +86,23 @@ class Unofficial(misaka.HtmlRenderer):


 class Markup(object):
+    """Text to HTML conversion using Markdown (+ configurable extensions) and
+    an HTML sanitizer to remove malicious elements.
+
+    :param options: a list of parameters for the used renderer
+    :param elements: allowed HTML elements in the output
+    :param attributes: allowed HTML attributes in the output
+    """

-    def __init__(self, conf):
-
-        parser = Markdown(conf.getlist("options"))
-        sanitizer = Sanitizer(
-            conf.getlist("allowed-elements"),
-            conf.getlist("allowed-attributes"))
+    def __init__(self, options, elements=None, attributes=None):
+        if elements is None:
+            elements = []
+
+        if attributes is None:
+            attributes = []
+
+        parser = Markdown(options)
+        sanitizer = Sanitizer(elements, attributes)

         self._render = lambda text: sanitize(sanitizer, parser(text))
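Markup now takes plain Python arguments instead of a config section, which is what the simplified test_render above exercises. A usage sketch; the extra "img"/"src" whitelist entries are only illustrative:

from isso.utils import html

markup = html.Markup(["autolink"], elements=["img"], attributes=["src"])
print(markup.render("http://example.org/ and <img src='/picture.png'>"))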
isso/utils/types.py (new file, 24 lines)

# -*- encoding: utf-8 -*-


def _TypeError(expected, val):
    if isinstance(expected, (list, tuple)):
        expected = ", ".join(ex.__name__ for ex in expected)
    else:
        expected = expected.__name__
    return TypeError("Expected {0}, not {1}".format(
        expected, val.__class__.__name__))


def require(val, expected):
    """Assure that :param val: is an instance of :param expected: or raise a
    :exception TypeError: indicating what's expected.

    >>> require(23, int)
    >>> require(None, bool)
    Traceback (most recent call last):
        ...
    TypeError: Expected bool, not NoneType
    """
    if not isinstance(val, expected):
        raise _TypeError(expected, val)
@@ -2,20 +2,10 @@

 from __future__ import unicode_literals

-import pkg_resources
-dist = pkg_resources.get_distribution("isso")
-
-import json
-
-from werkzeug.wrappers import Response
-from werkzeug.routing import Rule
 from werkzeug.exceptions import BadRequest

-from isso import local
-from isso.compat import text_type as str
-

-class requires:
+class requires(object):
     """Verify that the request URL contains and can parse the parameter.

     .. code-block:: python
@@ -46,20 +36,7 @@ class requires:

         return dec

-
-class Info(object):
-
-    def __init__(self, isso):
-        self.moderation = isso.conf.getboolean("moderation", "enabled")
-        isso.urls.add(Rule('/info', endpoint=self.show))
-
-    def show(self, environ, request):
-
-        rv = {
-            "version": dist.version,
-            "host": str(local("host")),
-            "origin": str(local("origin")),
-            "moderation": self.moderation,
-        }
-
-        return Response(json.dumps(rv), 200, content_type="application/json")
+
+from .api import API
+from .info import Info
+
+__all__ = ["requires", "API", "Info"]
410
isso/views/api.py
Normal file
410
isso/views/api.py
Normal file
@ -0,0 +1,410 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import cgi
import functools

from itsdangerous import SignatureExpired, BadSignature

from werkzeug.http import dump_cookie
from werkzeug.wrappers import Response
from werkzeug.exceptions import BadRequest, Forbidden, NotFound

from isso.compat import text_type as str, string_types

from isso import utils
from isso.utils import JSONResponse as JSON
from isso.views import requires
from isso.utils.hash import sha1

from isso.controllers import threads, comments


def normalize(url):
    if not url.startswith(("http://", "https://")):
        return "http://" + url
    return url


def xhr(func):
    """A decorator to check for CSRF on POST/PUT/DELETE using a <form>
    element and JS to execute automatically (see #40 for a proof-of-concept).

    When an attacker uses a <form> to downvote a comment, the browser *should*
    add a `Content-Type: ...` header with three possible values:

    * application/x-www-form-urlencoded
    * multipart/form-data
    * text/plain

    If the header is not sent or requests `application/json`, the request is
    not forged (XHR is restricted by CORS separately).
    """

    def dec(self, env, req, *args, **kwargs):

        if req.content_type and not req.content_type.startswith("application/json"):
            raise Forbidden("CSRF")
        return func(self, env, req, *args, **kwargs)

    return dec


def auth(func):
    """A decorator to check the validity of an auth cookie."""

    def dec(self, env, req, *args, **kwargs):

        if not self.conf.getboolean("auth", "enabled"):
            return func(self, env, req, *args, **kwargs)
        try:
            self.load(req.cookies.get("auth", ""))
        except (SignatureExpired, BadSignature):
            raise Forbidden
        return func(self, env, req, *args, **kwargs)

    return dec


class API(object):

    # comment fields, that can be submitted
    ACCEPT = set(['text', 'author', 'email', 'website', 'parent'])

    def __init__(self, conf, cache, db, guard, hash, markup, signer):
        self.conf = conf

        self.db = db
        self.cache = cache

        self.hash = hash
        self.markup = markup

        self.threads = threads.Controller(db)
        self.comments = comments.Controller(db, guard)

        self.max_age = conf.getint("general", "max-age")
        self.moderated = conf.getboolean("moderation", "enabled")

        self.sign = signer.dumps
        self.load = functools.partial(signer.loads, max_age=self.max_age)

    def serialize(self, comment, markup=True):
        _id = str(comment.id)
        obj = {
            "id": comment.id, "parent": comment.parent,
            "mode": comment.mode,
            "created": comment.created, "modified": comment.modified,
            "text": comment.text, "author": comment.author,
            "email": comment.email, "website": comment.website,
            "likes": comment.likes, "dislikes": comment.dislikes}

        if markup:
            html = self.cache.get("text", _id)
            if html is None:
                html = self.markup.render(comment.text)
                self.cache.set("text", _id, html)
            obj["text"] = html

            hash = self.cache.get("hash", _id)
            if hash is None:
                hash = self.hash(comment.email or comment.remote_addr)
                self.cache.set("hash", _id, hash)
            obj["hash"] = hash

        return obj

    def auth_cookie_to_data(self, request, data):
        """Update the received data with the information from cookie."""

        if self.conf.getboolean("auth", "enabled"):
            auth_data = self.load(request.cookies.get("auth", ""))
            data["author"] = auth_data.get("username")
            data["email"] = auth_data.get("email")
            data["website"] = auth_data.get("website")

    @xhr
    @auth
    @requires(str, 'uri')
    def new(self, environ, request, uri):
        data = request.get_json()

        if not isinstance(data, dict):
            raise BadRequest(400, "request data is not an object")

        for field in set(data.keys()) - API.ACCEPT:
            data.pop(field)

        self.auth_cookie_to_data(request, data)

        for field in ("author", "email", "website"):
            if isinstance(data.get(field, None), string_types):
                data[field] = cgi.escape(data[field])

        if isinstance(data.get("website", None), string_types):
            data["website"] = normalize(data["website"])

        remote_addr = utils.anonymize(str(request.remote_addr))

        with self.db.transaction:
            thread = self.threads.get(uri)
            if thread is None:
                thread = self.threads.new(uri)
            try:
                comment = self.comments.new(remote_addr, thread, data,
                                            moderated=self.moderated)
            except comments.Invalid as ex:
                raise BadRequest(ex.message)
            except comments.Denied as ex:
                raise Forbidden(ex.message)

        # TODO queue new thread, send notification

        _id = str(comment.id)
        signature = self.sign([comment.id, sha1(comment.text)])

        resp = JSON(
            self.serialize(comment),
            202 if comment.moderated == 2 else 201)
        resp.headers.add("Set-Cookie", dump_cookie(_id, signature))
        resp.headers.add("X-Set-Cookie", dump_cookie("isso-" + _id, signature))
        return resp

    def view(self, environ, request, id):
        comment = self.comments.get(id)

        if comment is None:
            raise NotFound

        markup = request.args.get('plain', '0') == '0'

        return JSON(self.serialize(comment, markup), 200)

    @xhr
    @auth
    def edit(self, environ, request, id):

        try:
            rv = self.load(request.cookies.get(str(id), ""))
        except (SignatureExpired, BadSignature):
            raise Forbidden

        if rv[0] != id:
            raise Forbidden

        comment = self.comments.get(id)
        if comment is None:
            raise NotFound

        # verify checksum, mallory might skip cookie deletion when
        # he deletes a comment
        if rv[1] != sha1(comment.text):
            raise Forbidden

        data = request.get_json()

        if not isinstance(data, dict):
            raise BadRequest(400, "request data is not an object")

        for field in set(data.keys()) - API.ACCEPT:
            data.pop(field)

        self.auth_cookie_to_data(request, data)

        with self.db.transaction:
            comment = self.comments.edit(id, data)

        _id = str(comment.id)
        signature = self.sign([comment.id, sha1(comment.text)])

        self.cache.delete("text", _id)
        self.cache.delete("hash", _id)

        resp = JSON(self.serialize(comment), 200)
        resp.headers.add("Set-Cookie", dump_cookie(_id, signature))
        resp.headers.add("X-Set-Cookie", dump_cookie("isso-" + _id, signature))
        return resp

    @xhr
    @auth
    def delete(self, environ, request, id, key=None):

        try:
            rv = self.load(request.cookies.get(str(id), ""))
        except (SignatureExpired, BadSignature):
            raise Forbidden

        if rv[0] != id:
            raise Forbidden

        comment = self.comments.get(id)
        if comment is None:
            raise NotFound

        if rv[1] != sha1(comment.text):
            raise Forbidden

        _id = str(comment.id)

        self.cache.delete("text", _id)
        self.cache.delete("hash", _id)

        with self.db.transaction:
            comment = self.comments.delete(id)

        cookie = functools.partial(dump_cookie, expires=0, max_age=0)

        resp = JSON(self.serialize(comment) if comment else None, 200)
        resp.headers.add("Set-Cookie", cookie(_id))
        resp.headers.add("X-Set-Cookie", cookie("isso-" + _id))
        return resp

    def moderate(self, environ, request, id, action, key):

        try:
            id = self.load(key, max_age=2**32)
        except (BadSignature, SignatureExpired):
            raise Forbidden

        comment = self.comments.get(id)
        if comment is None:
            raise NotFound

        if request.method == "GET":
            modal = (
                "<!DOCTYPE html>"
                "<html>"
                "<head>"
                "<script>"
                "  if (confirm('{0}: Are you sure?')) {"
                "      xhr = new XMLHttpRequest;"
                "      xhr.open('POST', window.location.href);"
                "      xhr.send(null);"
                "  }"
                "</script>".format(action.capitalize()))

            return Response(modal, 200, content_type="text/html")

        if action == "activate":
            with self.db.transaction:
                self.comments.activate(id)
        else:
            with self.db.transaction:
                self.comments.delete(id)

        self.cache.delete("text", str(comment.id))
        self.cache.delete("hash", str(comment.id))

        return Response("Ok", 200)

    # FIXME move logic into controller
    @requires(str, 'uri')
    def fetch(self, environ, request, uri):

        thread = self.threads.get(uri)
        if thread is None:
            raise NotFound

        args = {
            'thread': thread,
            'after': request.args.get('after', 0)
        }

        try:
            args['limit'] = int(request.args.get('limit'))
        except TypeError:
            args['limit'] = None
        except ValueError:
            return BadRequest("limit should be integer")

        if request.args.get('parent') is not None:
            try:
                args['parent'] = int(request.args.get('parent'))
                root_id = args['parent']
            except ValueError:
                return BadRequest("parent should be integer")
        else:
            args['parent'] = None
            root_id = None

        reply_counts = self.comments.reply_count(thread, after=args['after'])

        if args['limit'] == 0:
            root_list = []
        else:
            root_list = list(self.comments.all(**args))
            if not root_list:
                raise NotFound

        if root_id not in reply_counts:
            reply_counts[root_id] = 0

        try:
            nested_limit = int(request.args.get('nested_limit'))
        except TypeError:
            nested_limit = None
        except ValueError:
            return BadRequest("nested_limit should be integer")

        rv = {
            'id' : root_id,
            'total_replies' : reply_counts[root_id],
            'hidden_replies' : reply_counts[root_id] - len(root_list),
            'replies' : self._process_fetched_list(root_list)
        }
        # We are only checking for one level deep comments
        if root_id is None:
            for comment in rv['replies']:
                if comment['id'] in reply_counts:
                    comment['total_replies'] = reply_counts[comment['id']]
                    if nested_limit is not None:
                        if nested_limit > 0:
                            args['parent'] = comment['id']
                            args['limit'] = nested_limit
                            replies = list(self.comments.all(**args))
                        else:
                            replies = []
                    else:
                        args['parent'] = comment['id']
                        replies = list(self.comments.all(**args))
                else:
                    comment['total_replies'] = 0
                    replies = []

                comment['hidden_replies'] = comment['total_replies'] - len(replies)
                comment['replies'] = self._process_fetched_list(replies)

        return JSON(rv, 200)

    def _process_fetched_list(self, fetched_list):
        return map(self.serialize, fetched_list)

    @xhr
    @auth
    def like(self, environ, request, id):
        remote_addr = utils.anonymize(str(request.remote_addr))

        if not self.comments.like(remote_addr, id):
            raise BadRequest

        return Response("Ok", 200)

    @xhr
    @auth
    def dislike(self, environ, request, id):
        remote_addr = utils.anonymize(str(request.remote_addr))

        if not self.comments.dislike(remote_addr, id):
            raise BadRequest

        return Response("Ok", 200)

    def count(self, environ, request):
        data = request.get_json()

        if not isinstance(data, list) and not all(isinstance(x, str) for x in data):
            raise BadRequest("JSON must be a list of URLs")

        th = [self.threads.get(uri) for uri in data]
        return JSON(self.comments.count(*th), 200)
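Taken together, `xhr`, `auth` and `API.ACCEPT` define what a client has to send to `new()`: a JSON object, posted with `Content-Type: application/json`, containing at most the five accepted fields. A hypothetical client call follows; the host, port and the `/new?uri=...` route are assumptions carried over from the view module removed below, not something this file defines:

    import json
    try:
        from urllib.request import Request, urlopen   # Python 3
    except ImportError:
        from urllib2 import Request, urlopen          # Python 2

    payload = {
        "text": "Hello, world.",
        "author": "Jane Doe",
        "email": "jane@example.org",
        "website": "example.org",    # normalize() prepends http:// if missing
        "parent": None,              # anything outside API.ACCEPT is dropped
    }

    req = Request("http://localhost:8080/new?uri=%2Fhello-world%2F",
                  data=json.dumps(payload).encode("utf-8"),
                  headers={"Content-Type": "application/json"})
    print(urlopen(req).getcode())    # 201, or 202 when moderation is enabled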
@ -1,477 +0,0 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import re
import cgi
import time
import functools

from itsdangerous import SignatureExpired, BadSignature

from werkzeug.http import dump_cookie
from werkzeug.wsgi import get_current_url
from werkzeug.utils import redirect
from werkzeug.routing import Rule
from werkzeug.wrappers import Response
from werkzeug.exceptions import BadRequest, Forbidden, NotFound

from isso.compat import text_type as str

from isso import utils, local
from isso.utils import http, parse, JSONResponse as JSON
from isso.views import requires
from isso.utils.hash import sha1

# from Django appearently, looks good to me *duck*
__url_re = re.compile(
    r'^'
    r'(https?://)?'
    r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
    r'localhost|' # localhost...
    r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
    r'(?::\d+)?' # optional port
    r'(?:/?|[/?]\S+)'
    r'$', re.IGNORECASE)


def isurl(text):
    return __url_re.match(text) is not None


def normalize(url):
    if not url.startswith(("http://", "https://")):
        return "http://" + url
    return url


def xhr(func):
    """A decorator to check for CSRF on POST/PUT/DELETE using a <form>
    element and JS to execute automatically (see #40 for a proof-of-concept).

    When an attacker uses a <form> to downvote a comment, the browser *should*
    add a `Content-Type: ...` header with three possible values:

    * application/x-www-form-urlencoded
    * multipart/form-data
    * text/plain

    If the header is not sent or requests `application/json`, the request is
    not forged (XHR is restricted by CORS separately).
    """

    def dec(self, env, req, *args, **kwargs):

        if req.content_type and not req.content_type.startswith("application/json"):
            raise Forbidden("CSRF")
        return func(self, env, req, *args, **kwargs)

    return dec


class API(object):

    FIELDS = set(['id', 'parent', 'text', 'author', 'website', 'email',
                  'mode', 'created', 'modified', 'likes', 'dislikes', 'hash'])

    # comment fields, that can be submitted
    ACCEPT = set(['text', 'author', 'website', 'email', 'parent'])

    VIEWS = [
        ('fetch', ('GET', '/')),
        ('new', ('POST', '/new')),
        ('count', ('GET', '/count')),
        ('counts', ('POST', '/count')),
        ('view', ('GET', '/id/<int:id>')),
        ('edit', ('PUT', '/id/<int:id>')),
        ('delete', ('DELETE', '/id/<int:id>')),
        ('moderate',('GET', '/id/<int:id>/<any(activate,delete):action>/<string:key>')),
        ('moderate',('POST', '/id/<int:id>/<any(activate,delete):action>/<string:key>')),
        ('like', ('POST', '/id/<int:id>/like')),
        ('dislike', ('POST', '/id/<int:id>/dislike')),
        ('demo', ('GET', '/demo'))
    ]

    def __init__(self, isso, hasher):

        self.isso = isso
        self.hash = hasher.uhash
        self.cache = isso.cache
        self.signal = isso.signal

        self.conf = isso.conf.section("general")
        self.moderated = isso.conf.getboolean("moderation", "enabled")

        self.guard = isso.db.guard
        self.threads = isso.db.threads
        self.comments = isso.db.comments

        for (view, (method, path)) in self.VIEWS:
            isso.urls.add(
                Rule(path, methods=[method], endpoint=getattr(self, view)))

    @classmethod
    def verify(cls, comment):

        if "text" not in comment:
            return False, "text is missing"

        if not isinstance(comment.get("parent"), (int, type(None))):
            return False, "parent must be an integer or null"

        for key in ("text", "author", "website", "email"):
            if not isinstance(comment.get(key), (str, type(None))):
                return False, "%s must be a string or null" % key

        if len(comment["text"].rstrip()) < 3:
            return False, "text is too short (minimum length: 3)"

        if len(comment.get("email") or "") > 254:
            return False, "http://tools.ietf.org/html/rfc5321#section-4.5.3"

        if comment.get("website"):
            if len(comment["website"]) > 254:
                return False, "arbitrary length limit"
            if not isurl(comment["website"]):
                return False, "Website not Django-conform"

        return True, ""

    @xhr
    @requires(str, 'uri')
    def new(self, environ, request, uri):

        data = request.get_json()

        for field in set(data.keys()) - API.ACCEPT:
            data.pop(field)

        for key in ("author", "email", "website", "parent"):
            data.setdefault(key, None)

        valid, reason = API.verify(data)
        if not valid:
            return BadRequest(reason)

        for field in ("author", "email", "website"):
            if data.get(field) is not None:
                data[field] = cgi.escape(data[field])

        if data.get("website"):
            data["website"] = normalize(data["website"])

        data['mode'] = 2 if self.moderated else 1
        data['remote_addr'] = utils.anonymize(str(request.remote_addr))

        with self.isso.lock:
            if uri not in self.threads:
                with http.curl('GET', local("origin"), uri) as resp:
                    if resp and resp.status == 200:
                        uri, title = parse.thread(resp.read(), id=uri)
                    else:
                        return NotFound('URI does not exist')

                thread = self.threads.new(uri, title)
                self.signal("comments.new:new-thread", thread)
            else:
                thread = self.threads[uri]

        # notify extensions that the new comment is about to save
        self.signal("comments.new:before-save", thread, data)

        valid, reason = self.guard.validate(uri, data)
        if not valid:
            self.signal("comments.new:guard", reason)
            raise Forbidden(reason)

        with self.isso.lock:
            rv = self.comments.add(uri, data)

        # notify extension, that the new comment has been successfully saved
        self.signal("comments.new:after-save", thread, rv)

        cookie = functools.partial(dump_cookie,
            value=self.isso.sign([rv["id"], sha1(rv["text"])]),
            max_age=self.conf.getint('max-age'))

        rv["text"] = self.isso.render(rv["text"])
        rv["hash"] = self.hash(rv['email'] or rv['remote_addr'])

        self.cache.set('hash', (rv['email'] or rv['remote_addr']).encode('utf-8'), rv['hash'])

        for key in set(rv.keys()) - API.FIELDS:
            rv.pop(key)

        # success!
        self.signal("comments.new:finish", thread, rv)

        resp = JSON(rv, 202 if rv["mode"] == 2 else 201)
        resp.headers.add("Set-Cookie", cookie(str(rv["id"])))
        resp.headers.add("X-Set-Cookie", cookie("isso-%i" % rv["id"]))
        return resp

    def view(self, environ, request, id):

        rv = self.comments.get(id)
        if rv is None:
            raise NotFound

        for key in set(rv.keys()) - API.FIELDS:
            rv.pop(key)

        if request.args.get('plain', '0') == '0':
            rv['text'] = self.isso.render(rv['text'])

        return JSON(rv, 200)

    @xhr
    def edit(self, environ, request, id):

        try:
            rv = self.isso.unsign(request.cookies.get(str(id), ''))
        except (SignatureExpired, BadSignature):
            raise Forbidden

        if rv[0] != id:
            raise Forbidden

        # verify checksum, mallory might skip cookie deletion when he deletes a comment
        if rv[1] != sha1(self.comments.get(id)["text"]):
            raise Forbidden

        data = request.get_json()

        if "text" not in data or data["text"] is None or len(data["text"]) < 3:
            raise BadRequest("no text given")

        for key in set(data.keys()) - set(["text", "author", "website"]):
            data.pop(key)

        data['modified'] = time.time()

        with self.isso.lock:
            rv = self.comments.update(id, data)

        for key in set(rv.keys()) - API.FIELDS:
            rv.pop(key)

        self.signal("comments.edit", rv)

        cookie = functools.partial(dump_cookie,
            value=self.isso.sign([rv["id"], sha1(rv["text"])]),
            max_age=self.conf.getint('max-age'))

        rv["text"] = self.isso.render(rv["text"])

        resp = JSON(rv, 200)
        resp.headers.add("Set-Cookie", cookie(str(rv["id"])))
        resp.headers.add("X-Set-Cookie", cookie("isso-%i" % rv["id"]))
        return resp

    @xhr
    def delete(self, environ, request, id, key=None):

        try:
            rv = self.isso.unsign(request.cookies.get(str(id), ""))
        except (SignatureExpired, BadSignature):
            raise Forbidden
        else:
            if rv[0] != id:
                raise Forbidden

            # verify checksum, mallory might skip cookie deletion when he deletes a comment
            if rv[1] != sha1(self.comments.get(id)["text"]):
                raise Forbidden

        item = self.comments.get(id)

        if item is None:
            raise NotFound

        self.cache.delete('hash', (item['email'] or item['remote_addr']).encode('utf-8'))

        with self.isso.lock:
            rv = self.comments.delete(id)

        if rv:
            for key in set(rv.keys()) - API.FIELDS:
                rv.pop(key)

            self.signal("comments.delete", id)

        resp = JSON(rv, 200)
        cookie = functools.partial(dump_cookie, expires=0, max_age=0)
        resp.headers.add("Set-Cookie", cookie(str(id)))
        resp.headers.add("X-Set-Cookie", cookie("isso-%i" % id))
        return resp

    def moderate(self, environ, request, id, action, key):

        try:
            id = self.isso.unsign(key, max_age=2**32)
        except (BadSignature, SignatureExpired):
            raise Forbidden

        item = self.comments.get(id)

        if item is None:
            raise NotFound

        if request.method == "GET":
            modal = (
                "<!DOCTYPE html>"
                "<html>"
                "<head>"
                "<script>"
                "  if (confirm('%s: Are you sure?')) {"
                "      xhr = new XMLHttpRequest;"
                "      xhr.open('POST', window.location.href);"
                "      xhr.send(null);"
                "  }"
                "</script>" % action.capitalize())

            return Response(modal, 200, content_type="text/html")

        if action == "activate":
            with self.isso.lock:
                self.comments.activate(id)
            self.signal("comments.activate", id)
        else:
            with self.isso.lock:
                self.comments.delete(id)
                self.cache.delete('hash', (item['email'] or item['remote_addr']).encode('utf-8'))
            self.signal("comments.delete", id)

        return Response("Yo", 200)

    @requires(str, 'uri')
    def fetch(self, environ, request, uri):

        args = {
            'uri': uri,
            'after': request.args.get('after', 0)
        }

        try:
            args['limit'] = int(request.args.get('limit'))
        except TypeError:
            args['limit'] = None
        except ValueError:
            return BadRequest("limit should be integer")

        if request.args.get('parent') is not None:
            try:
                args['parent'] = int(request.args.get('parent'))
                root_id = args['parent']
            except ValueError:
                return BadRequest("parent should be integer")
        else:
            args['parent'] = None
            root_id = None

        plain = request.args.get('plain', '0') == '0'

        reply_counts = self.comments.reply_count(uri, after=args['after'])

        if args['limit'] == 0:
            root_list = []
        else:
            root_list = list(self.comments.fetch(**args))
            if not root_list:
                raise NotFound

        if root_id not in reply_counts:
            reply_counts[root_id] = 0

        try:
            nested_limit = int(request.args.get('nested_limit'))
        except TypeError:
            nested_limit = None
        except ValueError:
            return BadRequest("nested_limit should be integer")

        rv = {
            'id' : root_id,
            'total_replies' : reply_counts[root_id],
            'hidden_replies' : reply_counts[root_id] - len(root_list),
            'replies' : self._process_fetched_list(root_list, plain)
        }
        # We are only checking for one level deep comments
        if root_id is None:
            for comment in rv['replies']:
                if comment['id'] in reply_counts:
                    comment['total_replies'] = reply_counts[comment['id']]
                    if nested_limit is not None:
                        if nested_limit > 0:
                            args['parent'] = comment['id']
                            args['limit'] = nested_limit
                            replies = list(self.comments.fetch(**args))
                        else:
                            replies = []
                    else:
                        args['parent'] = comment['id']
                        replies = list(self.comments.fetch(**args))
                else:
                    comment['total_replies'] = 0
                    replies = []

                comment['hidden_replies'] = comment['total_replies'] - len(replies)
                comment['replies'] = self._process_fetched_list(replies, plain)

        return JSON(rv, 200)

    def _process_fetched_list(self, fetched_list, plain=False):
        for item in fetched_list:

            key = item['email'] or item['remote_addr']
            val = self.cache.get('hash', key.encode('utf-8'))

            if val is None:
                val = self.hash(key)
                self.cache.set('hash', key.encode('utf-8'), val)

            item['hash'] = val

            for key in set(item.keys()) - API.FIELDS:
                item.pop(key)

        if plain:
            for item in fetched_list:
                item['text'] = self.isso.render(item['text'])

        return fetched_list

    @xhr
    def like(self, environ, request, id):

        nv = self.comments.vote(True, id, utils.anonymize(str(request.remote_addr)))
        return JSON(nv, 200)

    @xhr
    def dislike(self, environ, request, id):

        nv = self.comments.vote(False, id, utils.anonymize(str(request.remote_addr)))
        return JSON(nv, 200)

    # TODO: remove someday (replaced by :func:`counts`)
    @requires(str, 'uri')
    def count(self, environ, request, uri):

        rv = self.comments.count(uri)[0]

        if rv == 0:
            raise NotFound

        return JSON(rv, 200)

    def counts(self, environ, request):

        data = request.get_json()

        if not isinstance(data, list) and not all(isinstance(x, str) for x in data):
            raise BadRequest("JSON must be a list of URLs")

        return JSON(self.comments.count(*data), 200)

    def demo(self, env, req):
        return redirect(get_current_url(env) + '/index.html')
30 isso/views/info.py Normal file
@ -0,0 +1,30 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import pkg_resources
dist = pkg_resources.get_distribution("isso")

import json

from werkzeug.wrappers import Response

from isso import local
from isso.compat import text_type as str


class Info(object):

    def __init__(self, conf):
        self.moderation = conf.getboolean("moderation", "enabled")

    def show(self, environ, request):

        rv = {
            "version": dist.version,
            "host": str(local("host")),
            "origin": str(local("origin")),
            "moderation": self.moderation,
        }

        return Response(json.dumps(rv), 200, content_type="application/json")
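For reference, the JSON document `Info.show()` produces has exactly the four keys built in the `rv` dict above; a sketch of a typical response body, with values that are illustrative only:

    # Illustrative only: the keys mirror the rv dict above, the values are made up.
    info_response = {
        "version": "0.7",                  # pkg_resources.get_distribution("isso").version
        "host": "http://localhost:8080/",  # str(local("host"))
        "origin": "http://example.tld",    # str(local("origin"))
        "moderation": False,               # [moderation] enabled
    }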
2 setup.py
@ -5,7 +5,7 @@ import sys

 from setuptools import setup, find_packages

-requires = ['itsdangerous', 'misaka', 'html5lib']
+requires = ['itsdangerous', 'misaka', 'html5lib>=0.95', 'SQLAlchemy>=0.7.6']

 if (3, 0) <= sys.version_info < (3, 3):
     raise SystemExit("Python 3.0, 3.1 and 3.2 are not supported")
@ -3,9 +3,9 @@
 [general]

-# file location to the SQLite3 database, highly recommended to change this
-# location to a non-temporary location!
-dbpath = /tmp/comments.db
+# Address of the database
+# see docs.sqlalchemy.org/en/rel_0_9/core/engines.html for the syntax
+dbpath = sqlite://

 # required to dispatch multiple websites, not used otherwise.
 name =
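With the move to SQLAlchemy, `dbpath` is interpreted as an engine URL rather than a plain file path. A short sketch of equivalent values; the on-disk path is only an example, not a recommendation from this changeset:

    # Requires SQLAlchemy, which setup.py now lists as a dependency.
    from sqlalchemy import create_engine

    create_engine("sqlite://")                            # in-memory database, the new default above
    create_engine("sqlite:////var/lib/isso/comments.db")  # SQLite file on disk
    # Other backends use the same URL syntax, e.g. "postgresql://user:password@host/dbname"
    # (a suitable DB-API driver must be installed for those).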
@ -115,7 +115,7 @@ timeout = 10
 # enable guard, recommended in production. Not useful for debugging purposes.
 enabled = true

-# limit to N new comments per minute.
+# limit to N new comments per minute. Use -1 to disable rate limit.
 ratelimit = 2

 # how many comments directly to the thread (prevent a simple while true; do
@ -128,6 +128,10 @@ direct-reply = 3
 reply-to-self = false


+[auth]
+enabled = false
+
+
 [markup]
 # Customize markup and sanitized HTML. Currently, only Markdown (via Misaka) is
 # supported, but new languages are relatively easy to add.
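The new `[auth]` switch and the extended `ratelimit` semantics are read through the same configparser calls the API views use (`getboolean`/`getint`); a small sketch with an in-memory configuration standing in for isso's real config object:

    try:
        import configparser                    # Python 3
    except ImportError:
        import ConfigParser as configparser    # Python 2

    conf = configparser.RawConfigParser()
    conf.add_section("guard")
    conf.set("guard", "ratelimit", "-1")
    conf.add_section("auth")
    conf.set("auth", "enabled", "false")

    print(conf.getboolean("auth", "enabled"))   # False -> the auth() decorator passes requests through
    print(conf.getint("guard", "ratelimit"))    # -1    -> rate limiting disabled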
13 tox.ini
@ -7,13 +7,6 @@ deps =
 commands =
     python setup.py nosetests

-[testenv:py26]
-deps =
-    argparse
-    unittest2
-    configparser
-    {[testenv]deps}
-
 [testenv:py27]
 deps =
     configparser
@ -28,12 +21,6 @@ deps=
     passlib==1.5.3
     werkzeug==0.8.3

-[testenv:squeeze]
-basepython=python2.6
-deps=
-    {[testenv:py26]deps}
-    {[testenv:debian]deps}
-
 [testenv:wheezy]
 basepython=python2.7
 deps =
@ -3,6 +3,7 @@ http = :8080
 master = true
 processes = 4
 cache2 = name=hash,items=10240,blocksize=32
+cache2 = name=text,items=1024
 spooler = %d/mail
 module = isso.run
 virtualenv = %d
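The extra `cache2` line adds a uWSGI cache named "text", which lines up with the "text" namespace that `serialize()` in isso/views/api.py uses to memoize rendered Markdown per comment id. A sketch of that lookup pattern, with a plain dict standing in for the uWSGI cache backend:

    # Sketch only: DictCache is a stand-in; the get/set signature mirrors the
    # cache calls in serialize() above.
    class DictCache(object):
        def __init__(self):
            self._data = {}

        def get(self, namespace, key):
            return self._data.get((namespace, key))

        def set(self, namespace, key, value):
            self._data[(namespace, key)] = value

    cache = DictCache()

    def render_cached(comment_id, text, render):
        html = cache.get("text", comment_id)
        if html is None:          # first request renders and stores the HTML
            html = render(text)
            cache.set("text", comment_id, html)
        return html               # later requests are served from the cache

    print(render_cached("1", "*hi*", lambda t: "<em>hi</em>"))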