Connection pooling and new cache implementations

* Add a wrapper around `sqlite3` to enable connection pooling across
  multiple threads. Most tests now use an in-memory database, which speeds
  things up slightly. The database wrapper is now decoupled from the actual
  database connection.

* Split the cache framework out of `core.Mixin` into a separate package,
  `isso.cache`. The dependency on `werkzeug.contrib` has been removed to
  ease a possible transition to a different web framework later.

  The default cache now uses SQLite3 (unless Isso runs inside uWSGI). While
  that may sound like a Bad Idea (tm), it is much more efficient than
  per-process Python data structures: the SQLite3 cache is SMP-capable and
  fast for read-heavy sites.

  Caveat: with concurrent read access from multiple processes, SQLite3 may
  fail and report a corrupt database. The database is not actually
  corrupted; the connection is merely stale. As a workaround, limit the
  number of processes to your CPU count, or wait until a "real" backend
  such as PostgreSQL is available.
parent bbd9e1b523
commit 88689c789a
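In short, one pooled connection is shared between the database adapter and
the cache. A minimal sketch of the new API, based on the test suite in this
commit (the in-memory path and the threshold are illustrative):

    from isso.db import SQLite3
    from isso.cache import SQLite3Cache

    connection = SQLite3(":memory:")               # pooled, thread-local connections
    cache = SQLite3Cache(connection, threshold=8)  # shared, SMP-capable cache

    cache.set("hash", "foo", "bar")                # namespace, key, value (JSON-serialized)
    assert cache.get("hash", "foo") == "bar"

    cache.delete("hash", "foo")
    assert cache.get("hash", "foo") is None        # a miss returns the default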
isso/__init__.py

@@ -25,7 +25,7 @@
 #
 # Isso – a lightweight Disqus alternative

-from __future__ import print_function
+from __future__ import print_function, unicode_literals

 import pkg_resources
 dist = pkg_resources.get_distribution("isso")
@@ -42,7 +42,6 @@ if sys.argv[0].startswith("isso"):
 import os
 import errno
 import logging
-import tempfile

 from os.path import dirname, join
 from argparse import ArgumentParser
@@ -62,7 +61,12 @@ from werkzeug.contrib.profiler import ProfilerMiddleware
 local = Local()
 local_manager = LocalManager([local])

-from isso import config, db, migrate, wsgi, ext, views
+try:
+    import uwsgi
+except ImportError:
+    uwsgi = None
+
+from isso import cache, config, db, migrate, wsgi, ext, views
 from isso.core import ThreadedMixin, ProcessMixin, uWSGIMixin
 from isso.wsgi import origin, urlsplit
 from isso.utils import http, JSONRequest, html, hash
@@ -80,10 +84,18 @@ logger = logging.getLogger("isso")

 class Isso(object):

-    def __init__(self, conf):
-
+    def __init__(self, conf, cacheobj=None, connection=None,
+                 multiprocessing=False):
+        if cacheobj is None:
+            cacheobj = cache.Cache(threshold=1024)
+
+        if connection is None:
+            connection = db.SQLite3(":memory:")
+
         self.conf = conf
-        self.db = db.SQLite3(conf.get('general', 'dbpath'), conf)
+        self.cache = cacheobj
+        self.connection = connection
+        self.db = db.Adapter(connection, conf)
         self.signer = URLSafeTimedSerializer(self.db.preferences.get("session-key"))
         self.markup = html.Markup(
             conf.getlist("markup", "options"),
@@ -92,6 +104,7 @@ class Isso(object):
         self.hasher = hash.new(
             conf.get("hash", "algorithm"),
             conf.get("hash", "salt"))
+        self.shared = True if multiprocessing else False

         super(Isso, self).__init__(conf)

@@ -142,6 +155,10 @@ class Isso(object):
                 return InternalServerError()
             else:
                 return response
+        finally:
+            # FIXME: always close connection but rather fix tests
+            if self.shared:
+                self.connection.close()

     def wsgi_app(self, environ, start_response):
         response = self.dispatch(JSONRequest(environ))
@@ -151,22 +168,29 @@ class Isso(object):
         return self.wsgi_app(environ, start_response)


-def make_app(conf=None, threading=True, multiprocessing=False, uwsgi=False):
+def make_app(conf, multiprocessing=True):

-    if not any((threading, multiprocessing, uwsgi)):
-        raise RuntimeError("either set threading, multiprocessing or uwsgi")
+    connection = db.SQLite3(conf.get("general", "dbpath"))
+    cacheobj = cache.SQLite3Cache(connection, threshold=2048)

-    if threading:
-        class App(Isso, ThreadedMixin):
-            pass
-    elif multiprocessing:
-        class App(Isso, ProcessMixin):
-            pass
-    else:
-        class App(Isso, uWSGIMixin):
-            pass
+    if multiprocessing:
+        if uwsgi is not None:
+            class App(Isso, uWSGIMixin):
+                pass
+
+            cacheobj = cache.uWSGICache(timeout=3600)
+        else:
+            class App(Isso, ProcessMixin):
+                pass
+    else:
+        class App(Isso, ThreadedMixin):
+            pass

-    isso = App(conf)
+    isso = App(
+        conf,
+        cacheobj=cacheobj,
+        connection=connection,
+        multiprocessing=multiprocessing)

     # check HTTP server connection
     for host in conf.getiter("general", "host"):
@@ -226,12 +250,11 @@ def main():
         conf.set("guard", "enabled", "off")

     if args.dryrun:
-        xxx = tempfile.NamedTemporaryFile()
-        dbpath = xxx.name
+        dbpath = ":memory:"
     else:
         dbpath = conf.get("general", "dbpath")

-    mydb = db.SQLite3(dbpath, conf)
+    mydb = db.Adapter(db.SQLite3(dbpath), conf)
     migrate.dispatch(args.type, mydb, args.dump)

     sys.exit(0)
@@ -240,13 +263,15 @@ def main():
         logger.error("No website(s) configured, Isso won't work.")
         sys.exit(1)

+    app = make_app(conf, multiprocessing=False)
+
     if conf.get("server", "listen").startswith("http://"):
         host, port, _ = urlsplit(conf.get("server", "listen"))
         try:
             from gevent.pywsgi import WSGIServer
-            WSGIServer((host, port), make_app(conf)).serve_forever()
+            WSGIServer((host, port), app).serve_forever()
         except ImportError:
-            run_simple(host, port, make_app(conf), threaded=True,
+            run_simple(host, port, app, threaded=True,
                        use_reloader=conf.getboolean('server', 'reload'))
     else:
         sock = conf.get("server", "listen").partition("unix://")[2]
@@ -255,4 +280,4 @@ def main():
         except OSError as ex:
             if ex.errno != errno.ENOENT:
                 raise
-        wsgi.SocketHTTPServer(sock, make_app(conf)).serve_forever()
+        wsgi.SocketHTTPServer(sock, app).serve_forever()
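For embedders, the new make_app signature boils down to the pattern the WSGI
entry point uses further below (a sketch; it assumes make_app is importable
from the isso package):

    import os
    from isso import config, dist, make_app

    conf = config.load(
        os.path.join(dist.location, "isso", "defaults.ini"),
        os.environ.get('ISSO_SETTINGS'))
    application = make_app(conf)  # multiprocessing=True by default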
isso/cache/__init__.py (new file, 105 lines)
@@ -0,0 +1,105 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

import abc
import json

from isso.utils import types
from isso.compat import string_types


def pickle(value):
    return json.dumps(value).encode("utf-8")


def unpickle(value):
    return json.loads(value.decode("utf-8"))


class Base(object):
    """Base class for all cache objects.

    Arbitrary values are set by namespace and key. Namespace and key must be
    strings; the underlying cache implementation may use :func:`pickle` and
    :func:`unpickle` to safely un-/serialize Python primitives.

    :param threshold: maximum size of the cache
    :param timeout: key expiration
    """

    __metaclass__ = abc.ABCMeta

    # enable serialization of Python primitives
    serialize = False

    def __init__(self, threshold, timeout):
        self.threshold = threshold
        self.timeout = timeout

    def get(self, ns, key, default=None):
        types.require(ns, string_types)
        types.require(key, string_types)

        try:
            value = self._get(ns.encode("utf-8"), key.encode("utf-8"))
        except KeyError:
            return default
        else:
            if self.serialize:
                value = unpickle(value)
            return value

    @abc.abstractmethod
    def _get(self, ns, key):
        return

    def set(self, ns, key, value):
        types.require(ns, string_types)
        types.require(key, string_types)

        if self.serialize:
            value = pickle(value)

        return self._set(ns.encode("utf-8"), key.encode("utf-8"), value)

    @abc.abstractmethod
    def _set(self, ns, key, value):
        return

    def delete(self, ns, key):
        types.require(ns, string_types)
        types.require(key, string_types)

        return self._delete(ns.encode("utf-8"), key.encode("utf-8"))

    @abc.abstractmethod
    def _delete(self, ns, key):
        return


class Cache(Base):
    """Implements a simple in-memory cache; once the threshold is reached, all
    cached elements are discarded (the timeout parameter is ignored).
    """

    def __init__(self, threshold=512, timeout=-1):
        super(Cache, self).__init__(threshold, timeout)
        self.cache = {}

    def _get(self, ns, key):
        return self.cache[ns + b'-' + key]

    def _set(self, ns, key, value):
        if len(self.cache) > self.threshold - 1:
            self.cache.clear()
        self.cache[ns + b'-' + key] = value

    def _delete(self, ns, key):
        self.cache.pop(ns + b'-' + key, None)


from .uwsgi import uWSGICache
from .sqlite import SQLite3Cache

__all__ = ["Cache", "SQLite3Cache", "uWSGICache"]
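Because Base.get/set/delete concentrate the type checks, the UTF-8 encoding
and the optional JSON round-trip, a new backend only has to fill in the three
byte-oriented hooks. A hypothetical dictionary-backed variant, for
illustration only (not part of this commit):

    class DictCache(Base):

        serialize = True  # values pass through pickle/unpickle (JSON)

        def __init__(self, threshold=512, timeout=-1):
            # threshold and timeout are accepted but ignored in this toy example
            super(DictCache, self).__init__(threshold, timeout)
            self.store = {}

        def _get(self, ns, key):
            return self.store[ns + b'-' + key]  # KeyError signals a miss

        def _set(self, ns, key, value):
            self.store[ns + b'-' + key] = value

        def _delete(self, ns, key):
            self.store.pop(ns + b'-' + key, None)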
isso/cache/sqlite.py (new file, 58 lines)
@@ -0,0 +1,58 @@
# -*- encoding: utf-8 -*-

from __future__ import absolute_import, unicode_literals

import time

from . import Base
from isso.db import SQLite3


class SQLite3Cache(Base):
    """Implements a shared cache using SQLite3. Works across multiple processes
    and threads, concurrent writes are not supported.

    JSON is used to serialize python primitives in a safe way.
    """

    serialize = True

    def __init__(self, connection, threshold=1024, timeout=-1):
        super(SQLite3Cache, self).__init__(threshold, timeout)
        self.connection = connection
        self.connection.execute(
            'CREATE TABLE IF NOT EXISTS cache ('
            '    key TEXT PRIMARY KEY,'
            '    value BLOB,'
            '    time FLOAT)')

        # drop trigger, in case threshold has changed
        self.connection.execute('DROP TRIGGER IF EXISTS sweeper')
        self.connection.execute([
            'CREATE TRIGGER sweeper AFTER INSERT ON cache',
            'BEGIN',
            '    DELETE FROM cache WHERE key NOT IN (',
            '        SELECT key FROM cache',
            '        ORDER BY time DESC LIMIT {0}'.format(threshold),
            '    );',
            'END'])

    def _get(self, ns, key, default=None):
        rv = self.connection.execute(
            'SELECT value FROM cache WHERE key = ?',
            (ns + b'-' + key, )).fetchone()

        if rv is None:
            raise KeyError

        return rv[0]

    def _set(self, ns, key, value):
        with self.connection.transaction as con:
            con.execute(
                'INSERT OR REPLACE INTO cache (key, value, time) VALUES (?, ?, ?)',
                (ns + b'-' + key, value, time.time()))

    def _delete(self, ns, key):
        with self.connection.transaction as con:
            con.execute('DELETE FROM cache WHERE key = ?', (ns + b'-' + key, ))
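For reference, with the default threshold of 1024 the constructor installs
this trigger (the list of strings above, joined by SQLite3.execute):

    CREATE TRIGGER sweeper AFTER INSERT ON cache
    BEGIN
        DELETE FROM cache WHERE key NOT IN (
            SELECT key FROM cache
            ORDER BY time DESC LIMIT 1024
        );
    END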
isso/cache/uwsgi.py (new file, 34 lines)
@@ -0,0 +1,34 @@
# -*- encoding: utf-8 -*-

from __future__ import absolute_import, unicode_literals

try:
    import uwsgi
except ImportError:
    uwsgi = None

from . import Base


class uWSGICache(Base):
    """Utilize uWSGI caching framework, in-memory and SMP-safe.
    """

    serialize = True

    def __init__(self, threshold=-1, timeout=3600):
        if uwsgi is None:
            raise RuntimeError("uWSGI not available")

        super(uWSGICache, self).__init__(threshold, timeout)

    def _get(self, ns, key):
        if not uwsgi.cache_exists(key, ns):
            raise KeyError
        return uwsgi.cache_get(key, ns)

    def _delete(self, ns, key):
        uwsgi.cache_del(key, ns)

    def _set(self, ns, key, value):
        uwsgi.cache_set(key, value, self.timeout, ns)
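Note that this backend assumes a cache has been configured in the uWSGI INI;
the docstring removed from core.py below gave this example:

    cache2 = name=hash,items=1024,blocksize=32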
isso/core.py
@@ -19,34 +19,13 @@ if PY2K:
 else:
     import _thread as thread

-from werkzeug.contrib.cache import NullCache, SimpleCache
-
 logger = logging.getLogger("isso")


-class Cache:
-    """Wrapper around werkzeug's cache class, to make it compatible to
-    uWSGI's cache framework.
-    """
-
-    def __init__(self, cache):
-        self.cache = cache
-
-    def get(self, cache, key):
-        return self.cache.get(key)
-
-    def set(self, cache, key, value):
-        return self.cache.set(key, value)
-
-    def delete(self, cache, key):
-        return self.cache.delete(key)
-
-
 class Mixin(object):

     def __init__(self, conf):
         self.lock = threading.Lock()
-        self.cache = Cache(NullCache())

     def notify(self, subject, body, retries=5):
         pass
@@ -72,8 +51,6 @@ class ThreadedMixin(Mixin):
         if conf.getboolean("moderation", "enabled"):
             self.purge(conf.getint("moderation", "purge-after"))

-        self.cache = Cache(SimpleCache(threshold=1024, default_timeout=3600))
-
     @threaded
     def purge(self, delta):
         while True:
@@ -90,28 +67,6 @@ class ProcessMixin(ThreadedMixin):
         self.lock = multiprocessing.Lock()


-class uWSGICache(object):
-    """Uses uWSGI Caching Framework. INI configuration:
-
-    .. code-block:: ini
-
-        cache2 = name=hash,items=1024,blocksize=32
-
-    """
-
-    @classmethod
-    def get(self, cache, key):
-        return uwsgi.cache_get(key, cache)
-
-    @classmethod
-    def set(self, cache, key, value):
-        uwsgi.cache_set(key, value, 3600, cache)
-
-    @classmethod
-    def delete(self, cache, key):
-        uwsgi.cache_del(key, cache)
-
-
 class uWSGIMixin(Mixin):

     def __init__(self, conf):
@@ -119,7 +74,6 @@ class uWSGIMixin(Mixin):
         super(uWSGIMixin, self).__init__(conf)

         self.lock = multiprocessing.Lock()
-        self.cache = uWSGICache

         timedelta = conf.getint("moderation", "purge-after")
         purge = lambda signum: self.db.comments.purge(timedelta)
isso/db/__init__.py

@@ -1,8 +1,12 @@
 # -*- encoding: utf-8 -*-

-import sqlite3
+from __future__ import unicode_literals
+
+import logging
+import sqlite3
 import operator
+import threading

 import os.path

 from collections import defaultdict
@@ -15,7 +19,75 @@ from isso.db.spam import Guard
 from isso.db.preferences import Preferences


-class SQLite3:
+class Transaction(object):
+    """A context manager to lock the database across processes and automatic
+    rollback on failure. On success, reset the isolation level back to normal.
+
+    SQLite3's DEFERRED (default) transaction mode causes database corruption
+    for concurrent writes to the database from multiple processes. IMMEDIATE
+    ensures a global write lock, but reading is still possible.
+    """
+
+    def __init__(self, con):
+        self.con = con
+
+    def __enter__(self):
+        self._orig = self.con.isolation_level
+        self.con.isolation_level = "IMMEDIATE"
+        self.con.execute("BEGIN IMMEDIATE")
+        return self.con
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        try:
+            if exc_type:
+                self.con.rollback()
+            else:
+                self.con.commit()
+        finally:
+            self.con.isolation_level = self._orig
+
+
+class SQLite3(object):
+    """SQLite3 connection pool across multiple threads. Implementation idea
+    from `Peewee <https://github.com/coleifer/peewee>`_.
+    """
+
+    def __init__(self, db):
+        self.db = os.path.expanduser(db)
+        self.lock = threading.Lock()
+        self.local = threading.local()
+
+    def connect(self):
+        with self.lock:
+            self.local.conn = sqlite3.connect(self.db, isolation_level=None)
+
+    def close(self):
+        with self.lock:
+            self.local.conn.close()
+            self.local.conn = None
+
+    def execute(self, sql, args=()):
+        if isinstance(sql, (list, tuple)):
+            sql = ' '.join(sql)
+
+        return self.connection.execute(sql, args)
+
+    @property
+    def connection(self):
+        if not hasattr(self.local, 'conn') or self.local.conn is None:
+            self.connect()
+        return self.local.conn
+
+    @property
+    def transaction(self):
+        return Transaction(self.connection)
+
+    @property
+    def total_changes(self):
+        return self.connection.total_changes
+
+
+class Adapter(object):
     """DB-dependend wrapper around SQLite3.

     Runs migration if `user_version` is older than `MAX_VERSION` and register
@@ -24,9 +96,8 @@ class SQLite3:

     MAX_VERSION = 3

-    def __init__(self, path, conf):
-
-        self.path = os.path.expanduser(path)
+    def __init__(self, conn, conf):
+        self.connection = conn
         self.conf = conf

         rv = self.execute([
@@ -40,9 +111,9 @@ class SQLite3:
         self.guard = Guard(self)

         if rv is None:
-            self.execute("PRAGMA user_version = %i" % SQLite3.MAX_VERSION)
+            self.execute("PRAGMA user_version = %i" % Adapter.MAX_VERSION)
         else:
-            self.migrate(to=SQLite3.MAX_VERSION)
+            self.migrate(to=Adapter.MAX_VERSION)

         self.execute([
             'CREATE TRIGGER IF NOT EXISTS remove_stale_threads',
@@ -51,14 +122,6 @@ class SQLite3:
             '    DELETE FROM threads WHERE id NOT IN (SELECT tid FROM comments);',
             'END'])

-    def execute(self, sql, args=()):
-
-        if isinstance(sql, (list, tuple)):
-            sql = ' '.join(sql)
-
-        with sqlite3.connect(self.path) as con:
-            return con.execute(sql, args)
-
     @property
     def version(self):
         return self.execute("PRAGMA user_version").fetchone()[0]
@@ -77,7 +140,7 @@ class SQLite3:
             from isso.utils import Bloomfilter
             bf = buffer(Bloomfilter(iterable=["127.0.0.0"]).array)

-            with sqlite3.connect(self.path) as con:
+            with self.connection.transaction as con:
                 con.execute('UPDATE comments SET voters=?', (bf, ))
                 con.execute('PRAGMA user_version = 1')
                 logger.info("%i rows changed", con.total_changes)
@@ -85,7 +148,7 @@ class SQLite3:
         # move [general] session-key to database
         if self.version == 1:

-            with sqlite3.connect(self.path) as con:
+            with self.connection.transaction as con:
                 if self.conf.has_option("general", "session-key"):
                     con.execute('UPDATE preferences SET value=? WHERE key=?', (
                         self.conf.get("general", "session-key"), "session-key"))
@@ -98,7 +161,7 @@ class SQLite3:

             first = lambda rv: list(map(operator.itemgetter(0), rv))

-            with sqlite3.connect(self.path) as con:
+            with self.connection.transaction as con:
                 top = first(con.execute("SELECT id FROM comments WHERE parent IS NULL").fetchall())
                 flattened = defaultdict(set)

@@ -117,3 +180,6 @@ class SQLite3:

                 con.execute('PRAGMA user_version = 3')
                 logger.info("%i rows changed", con.total_changes)
+
+    def execute(self, sql, args=()):
+        return self.connection.execute(sql, args)
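Typical use of the pool and its transaction property, mirroring test_rollback
in the updated test_db.py below (a sketch):

    con = SQLite3(":memory:")
    con.execute("CREATE TABLE foo (bar INTEGER)")

    with con.transaction as c:  # BEGIN IMMEDIATE; commit on success, rollback on error
        c.execute("INSERT INTO foo (bar) VALUES (42)")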
isso/run.py

@@ -1,5 +1,7 @@
 # -*- encoding: utf-8 -*-

+from __future__ import unicode_literals
+
 import os
 import binascii

@@ -8,5 +8,4 @@ from isso import dist, config
 application = make_app(
     config.load(
         os.path.join(dist.location, "isso", "defaults.ini"),
-        os.environ.get('ISSO_SETTINGS')),
-    multiprocessing=True)
+        os.environ.get('ISSO_SETTINGS')))
isso/tests/test_cache.py (new file, 62 lines)
@@ -0,0 +1,62 @@
# -*- encoding: utf-8 -*-

from __future__ import unicode_literals

try:
    import unittest2 as unittest
except ImportError:
    import unittest

from isso.compat import text_type as str

from isso.db import SQLite3
from isso.cache import Cache, SQLite3Cache

ns = "test"


class TestCache(unittest.TestCase):

    def setUp(self):
        self.cache = Cache(threshold=8)

    def test_cache(self):
        cache = self.cache

        cache.delete(ns, "foo")
        self.assertIsNone(cache.get(ns, "foo"))

        cache.set(ns, "foo", "bar")
        self.assertEqual(cache.get(ns, "foo"), "bar")

        cache.delete(ns, "foo")
        self.assertIsNone(cache.get(ns, "foo"))

    def test_full(self):
        cache = self.cache

        cache.set(ns, "foo", "bar")

        for i in range(7):
            cache.set(ns, str(i), "Spam!")

        for i in range(7):
            self.assertEqual(cache.get(ns, str(i)), "Spam!")

        self.assertIsNotNone(cache.get(ns, "foo"))

        cache.set(ns, "bar", "baz")
        self.assertIsNone(cache.get(ns, "foo"))

    def test_primitives(self):
        cache = self.cache

        for val in (None, True, [1, 2, 3], {"bar": "baz"}):
            cache.set(ns, "val", val)
            self.assertEqual(cache.get(ns, "val"), val, val.__class__.__name__)


class TestSQLite3Cache(TestCache):

    def setUp(self):
        self.cache = SQLite3Cache(SQLite3(":memory:"), threshold=8)
isso/tests/test_comments.py

@@ -4,7 +4,6 @@ from __future__ import unicode_literals

 import os
 import json
-import tempfile

 try:
     import unittest2 as unittest
@@ -31,9 +30,7 @@ http.curl = curl
 class TestComments(unittest.TestCase):

     def setUp(self):
-        fd, self.path = tempfile.mkstemp()
         conf = config.load(os.path.join(dist.location, "isso", "defaults.ini"))
-        conf.set("general", "dbpath", self.path)
         conf.set("guard", "enabled", "off")
         conf.set("hash", "algorithm", "none")

@@ -49,9 +46,6 @@ class TestComments(unittest.TestCase):
         self.post = self.client.post
         self.delete = self.client.delete

-    def tearDown(self):
-        os.unlink(self.path)
-
     def testGet(self):

         self.post('/new?uri=%2Fpath%2F', data=json.dumps({'text': 'Lorem ipsum ...'}))
@@ -379,9 +373,7 @@ class TestComments(unittest.TestCase):
 class TestModeratedComments(unittest.TestCase):

     def setUp(self):
-        fd, self.path = tempfile.mkstemp()
         conf = config.load(os.path.join(dist.location, "isso", "defaults.ini"))
-        conf.set("general", "dbpath", self.path)
         conf.set("moderation", "enabled", "true")
         conf.set("guard", "enabled", "off")
         conf.set("hash", "algorithm", "none")
@@ -393,9 +385,6 @@ class TestModeratedComments(unittest.TestCase):
         self.app.wsgi_app = FakeIP(self.app.wsgi_app, "192.168.1.1")
         self.client = JSONClient(self.app, Response)

-    def tearDown(self):
-        os.unlink(self.path)
-
     def testAddComment(self):

         rv = self.client.post('/new?uri=test', data=json.dumps({"text": "..."}))
@@ -411,9 +400,7 @@ class TestModeratedComments(unittest.TestCase):
 class TestPurgeComments(unittest.TestCase):

     def setUp(self):
-        fd, self.path = tempfile.mkstemp()
         conf = config.load(os.path.join(dist.location, "isso", "defaults.ini"))
-        conf.set("general", "dbpath", self.path)
         conf.set("moderation", "enabled", "true")
         conf.set("guard", "enabled", "off")
         conf.set("hash", "algorithm", "none")
isso/tests/test_db.py

@@ -1,28 +1,51 @@
 # -*- encoding: utf-8 -*-

+from __future__ import unicode_literals
+
 try:
     import unittest2 as unittest
 except ImportError:
     import unittest

 import os
 import sqlite3
 import tempfile

 from isso import config
-from isso.db import SQLite3
+from isso.db import SQLite3, Adapter

 from isso.compat import iteritems


+class TestSQLite3(unittest.TestCase):
+
+    def test_connection(self):
+        con = SQLite3(":memory:")
+
+        con.connect()
+        self.assertTrue(hasattr(con.local, "conn"))
+
+        con.close()
+        self.assertIsNone(con.local.conn)
+
+    def test_autoconnect(self):
+        con = SQLite3(":memory:")
+        con.execute("")
+        self.assertTrue(hasattr(con.local, "conn"))
+
+    def test_rollback(self):
+        con = SQLite3(":memory:")
+        con.execute("CREATE TABLE foo (bar INTEGER)")
+        con.execute("INSERT INTO foo (bar) VALUES (42)")
+
+        try:
+            with con.transaction as con:
+                con.execute("INSERT INTO foo (bar) VALUES (23)")
+                raise ValueError("some error")
+        except ValueError:
+            pass
+
+        self.assertEqual(len(con.execute("SELECT bar FROM foo").fetchall()), 1)
+
+
 class TestDBMigration(unittest.TestCase):

     def setUp(self):
         fd, self.path = tempfile.mkstemp()

     def tearDown(self):
         os.unlink(self.path)

     def test_defaults(self):

         conf = config.new({
@@ -31,9 +54,9 @@ class TestDBMigration(unittest.TestCase):
                 "max-age": "1h"
             }
         })
-        db = SQLite3(self.path, conf)
+        db = Adapter(SQLite3(":memory:"), conf)

-        self.assertEqual(db.version, SQLite3.MAX_VERSION)
+        self.assertEqual(db.version, Adapter.MAX_VERSION)
         self.assertTrue(db.preferences.get("session-key", "").isalnum())

     def test_session_key_migration(self):
@@ -46,21 +69,23 @@ class TestDBMigration(unittest.TestCase):
         })
         conf.set("general", "session-key", "supersecretkey")

-        with sqlite3.connect(self.path) as con:
+        connection = SQLite3(":memory:")
+
+        with connection.transaction as con:
             con.execute("PRAGMA user_version = 1")
             con.execute("CREATE TABLE threads (id INTEGER PRIMARY KEY)")

-        db = SQLite3(self.path, conf)
+        db = Adapter(connection, conf)

-        self.assertEqual(db.version, SQLite3.MAX_VERSION)
+        self.assertEqual(db.version, Adapter.MAX_VERSION)
         self.assertEqual(db.preferences.get("session-key"),
                          conf.get("general", "session-key"))

         # try again, now with the session-key removed from our conf
         conf.remove_option("general", "session-key")
-        db = SQLite3(self.path, conf)
+        db = Adapter(connection, conf)

-        self.assertEqual(db.version, SQLite3.MAX_VERSION)
+        self.assertEqual(db.version, Adapter.MAX_VERSION)
         self.assertEqual(db.preferences.get("session-key"),
                          "supersecretkey")

@@ -76,7 +101,9 @@ class TestDBMigration(unittest.TestCase):
             6: None
         }

-        with sqlite3.connect(self.path) as con:
+        connection = SQLite3(":memory:")
+
+        with connection.transaction as con:
             con.execute("PRAGMA user_version = 2")
             con.execute("CREATE TABLE threads ("
                         "    id INTEGER PRIMARY KEY,"
@@ -106,7 +133,7 @@ class TestDBMigration(unittest.TestCase):
                 "max-age": "1h"
             }
         })
-        SQLite3(self.path, conf)
+        Adapter(connection, conf)

         flattened = [
             (1, None),
@@ -118,6 +145,6 @@ class TestDBMigration(unittest.TestCase):
             (7, 2)
         ]

-        with sqlite3.connect(self.path) as con:
+        with connection.transaction as con:
             rv = con.execute("SELECT id, parent FROM comments ORDER BY created").fetchall()
         self.assertEqual(flattened, rv)
isso/tests/test_guard.py

@@ -15,7 +15,7 @@ from werkzeug import __version__
 from werkzeug.test import Client
 from werkzeug.wrappers import Response

-from isso import Isso, config, core, dist
+from isso import Isso, config, core, db, dist
 from isso.utils import http

 from fixtures import curl, FakeIP
@@ -33,12 +33,11 @@ class TestGuard(unittest.TestCase):
     data = json.dumps({"text": "Lorem ipsum."})

     def setUp(self):
-        self.path = tempfile.NamedTemporaryFile().name
+        self.connection = db.SQLite3(":memory:")

     def makeClient(self, ip, ratelimit=2, direct_reply=3, self_reply=False):

         conf = config.load(os.path.join(dist.location, "isso", "defaults.ini"))
-        conf.set("general", "dbpath", self.path)
         conf.set("hash", "algorithm", "none")
         conf.set("guard", "enabled", "true")
         conf.set("guard", "ratelimit", str(ratelimit))
@@ -48,7 +47,7 @@ class TestGuard(unittest.TestCase):
         class App(Isso, core.Mixin):
             pass

-        app = App(conf)
+        app = App(conf, connection=self.connection)
         app.wsgi_app = FakeIP(app.wsgi_app, ip)

         return Client(app, Response)
isso/tests/test_migrate.py

@@ -7,12 +7,11 @@ try:
 except ImportError:
     import unittest

-import tempfile
 from os.path import join, dirname

 from isso import config

-from isso.db import SQLite3
+from isso.db import SQLite3, Adapter
 from isso.migrate import Disqus, WordPress, autodetect

 conf = config.new({
@@ -28,9 +27,8 @@ class TestMigration(unittest.TestCase):
     def test_disqus(self):

         xml = join(dirname(__file__), "disqus.xml")
-        xxx = tempfile.NamedTemporaryFile()

-        db = SQLite3(xxx.name, conf)
+        db = Adapter(SQLite3(":memory:"), conf)
         Disqus(db, xml).migrate()

         self.assertEqual(len(db.execute("SELECT id FROM comments").fetchall()), 2)
@@ -50,9 +48,8 @@ class TestMigration(unittest.TestCase):
     def test_wordpress(self):

         xml = join(dirname(__file__), "wordpress.xml")
-        xxx = tempfile.NamedTemporaryFile()

-        db = SQLite3(xxx.name, conf)
+        db = Adapter(SQLite3(":memory:"), conf)
         WordPress(db, xml).migrate()

         self.assertEqual(db.threads["/2014/test/"]["title"], "Hello, World…")
isso/tests/test_vote.py

@@ -3,7 +3,6 @@ from __future__ import unicode_literals

 import os
 import json
-import tempfile

 try:
     import unittest2 as unittest
@@ -12,7 +11,7 @@ except ImportError:

 from werkzeug.wrappers import Response

-from isso import Isso, core, config, dist
+from isso import Isso, cache, core, config, dist
 from isso.utils import http

 from fixtures import curl, loads, FakeIP, JSONClient
@@ -22,22 +21,17 @@ http.curl = curl
 class TestVote(unittest.TestCase):

     def setUp(self):
-        self.path = tempfile.NamedTemporaryFile().name
-
-    def makeClient(self, ip):
-
         conf = config.load(os.path.join(dist.location, "isso", "defaults.ini"))
-        conf.set("general", "dbpath", self.path)
         conf.set("guard", "enabled", "off")
         conf.set("hash", "algorithm", "none")

         class App(Isso, core.Mixin):
             pass

-        app = App(conf)
-        app.wsgi_app = FakeIP(app.wsgi_app, ip)
+        self.app = App(conf)

-        return JSONClient(app, Response)
+    def makeClient(self, ip):
+        return JSONClient(FakeIP(self.app.wsgi_app, ip), Response)

     def testZeroLikes(self):
isso/utils/types.py

@@ -3,7 +3,7 @@

 def _TypeError(expected, val):
     if isinstance(expected, (list, tuple)):
-        expected = ", ".join(expected.__name__)
+        expected = ", ".join(ex.__name__ for ex in expected)
     else:
         expected = expected.__name__
     return TypeError("Expected {0}, not {1}".format(
@@ -11,5 +11,14 @@ def _TypeError(expected, val):


 def require(val, expected):
+    """Assure that :param val: is an instance of :param expected: or raise a
+    :exception TypeError: indicating what's expected.
+
+    >>> require(23, int)
+    >>> require(None, bool)
+    Traceback (most recent call last):
+        ...
+    TypeError: Expected bool, not NoneType
+    """
     if not isinstance(val, expected):
         raise _TypeError(expected, val)
isso/views/comments.py

@@ -1,5 +1,7 @@
 # -*- encoding: utf-8 -*-

+from __future__ import unicode_literals
+
 import re
 import cgi
 import time
@@ -195,7 +197,7 @@ class API(object):
         rv["text"] = self.isso.render(rv["text"])
         rv["hash"] = self.hash(rv['email'] or rv['remote_addr'])

-        self.cache.set('hash', (rv['email'] or rv['remote_addr']).encode('utf-8'), rv['hash'])
+        self.cache.set('hash', (rv['email'] or rv['remote_addr']), rv['hash'])

         for key in set(rv.keys()) - API.FIELDS:
             rv.pop(key)
@@ -286,7 +288,7 @@ class API(object):
         if item is None:
             raise NotFound

-        self.cache.delete('hash', (item['email'] or item['remote_addr']).encode('utf-8'))
+        self.cache.delete('hash', (item['email'] or item['remote_addr']))

         with self.isso.lock:
             rv = self.comments.delete(id)
@@ -337,7 +339,7 @@ class API(object):
         else:
             with self.isso.lock:
                 self.comments.delete(id)
-                self.cache.delete('hash', (item['email'] or item['remote_addr']).encode('utf-8'))
+                self.cache.delete('hash', (item['email'] or item['remote_addr']))
             self.signal("comments.delete", id)

         return Response("Yo", 200)
@@ -422,11 +424,11 @@ class API(object):
         for item in fetched_list:

             key = item['email'] or item['remote_addr']
-            val = self.cache.get('hash', key.encode('utf-8'))
+            val = self.cache.get('hash', key)

             if val is None:
                 val = self.hash(key)
-                self.cache.set('hash', key.encode('utf-8'), val)
+                self.cache.set('hash', key, val)

             item['hash'] = val
isso/defaults.ini

@@ -4,8 +4,8 @@
 [general]

 # file location to the SQLite3 database, highly recommended to change this
-# location to a non-temporary location!
-dbpath = /tmp/comments.db
+# location to a persistent location, e.g. /var/lib/isso/comments.db.
+dbpath = :memory:

 # required to dispatch multiple websites, not used otherwise.
 name =
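Deployments that previously relied on the shipped default must now point
dbpath at a persistent file in their own configuration, e.g.:

    [general]
    dbpath = /var/lib/isso/comments.db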