cache PBKDF2 hash, closes #18
Markdown conversion is not the reason for the ~2 s response time per 100 comments; the PBKDF2 hash function is. When the hash for an email/remote_addr is served from the cache, the response is pretty fast.

* when uWSGI is available, use its caching framework
* for the multi-threaded environment (the default), use a simple cache shipped with werkzeug
parent 74ab58167e
commit cac4694f43
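As a quick illustration of the pattern this commit introduces, here is a minimal, standalone sketch of the cache-or-compute step. It is not isso's actual code: the function name identicon_hash and the salt value are made up, hashlib.pbkdf2_hmac stands in for the pbkdf2 helper used in the diff below, and it assumes a werkzeug version that still ships werkzeug.contrib.cache.

import hashlib
from werkzeug.contrib.cache import SimpleCache

cache = SimpleCache(threshold=1024, default_timeout=3600)
salt = b"change-me"  # placeholder only; isso uses its own salt (app.salt)

def identicon_hash(email_or_addr):
    # the cache is keyed by the commenter's email or remote address
    val = cache.get(email_or_addr)
    if val is None:
        # only pay for PBKDF2 (1000 iterations, 6-byte key) on a cache miss
        val = hashlib.pbkdf2_hmac("sha1", email_or_addr.encode("utf-8"),
                                  salt, 1000, 6).hex()
        cache.set(email_or_addr, val)
    return val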
isso.ini +1
@@ -2,6 +2,7 @@
 http = :8080
 master = true
 processes = 4
+cache2 = name=hash,items=1024,blocksize=32
 spooler = %v/mail
 module = isso
 virtualenv = .
isso/core.py +49
@@ -30,6 +30,8 @@ from isso import notify
 from isso.utils import parse
 from isso.compat import text_type as str

+from werkzeug.contrib.cache import NullCache, SimpleCache
+
 logger = logging.getLogger("isso")


@@ -137,10 +139,29 @@ def SMTP(conf):
     return mailer


+class Cache:
+    """Wrapper around werkzeug's cache class, to make it compatible to
+    uWSGI's cache framework.
+    """
+
+    def __init__(self, cache):
+        self.cache = cache
+
+    def get(self, cache, key):
+        return self.cache.get(key)
+
+    def set(self, cache, key, value):
+        return self.cache.set(key, value)
+
+    def delete(self, cache, key):
+        return self.cache.delete(key)
+
+
 class Mixin(object):

     def __init__(self, conf):
         self.lock = threading.Lock()
+        self.cache = Cache(NullCache())

     def notify(self, subject, body, retries=5):
         pass

@@ -167,7 +188,7 @@ class ThreadedMixin(Mixin):
             self.purge(conf.getint("moderation", "purge-after"))

         self.mailer = SMTP(conf)
-
+        self.cache = Cache(SimpleCache(threshold=1024, default_timeout=3600))

     @threaded
     def notify(self, subject, body, retries=5):

@@ -188,6 +209,28 @@ class ThreadedMixin(Mixin):
                 time.sleep(delta)


+class uWSGICache(object):
+    """Uses uWSGI Caching Framework. INI configuration:
+
+    .. code-block:: ini
+
+        cache2 = name=hash,items=1024,blocksize=32
+
+    """
+
+    @classmethod
+    def get(self, cache, key):
+        return uwsgi.cache_get(key, cache)
+
+    @classmethod
+    def set(self, cache, key, value):
+        uwsgi.cache_set(key, value, 3600, cache)
+
+    @classmethod
+    def delete(self, cache, key):
+        uwsgi.cache_del(key, cache)
+
+
 class uWSGIMixin(Mixin):

     def __init__(self, conf):

@@ -210,9 +253,11 @@ class uWSGIMixin(Mixin):
             else:
                 return uwsgi.SPOOL_OK

+        uwsgi.spooler = spooler
+
         self.lock = Lock()
         self.mailer = SMTP(conf)
-        uwsgi.spooler = spooler
+        self.cache = uWSGICache

         timedelta = conf.getint("moderation", "purge-after")
         purge = lambda signum: self.db.comments.purge(timedelta)
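Both cache backends expose the same get/set/delete signature, with the cache name as the first argument, so the views can call app.cache the same way regardless of which mixin is active. A rough usage sketch, assuming the classes above are importable from isso.core and that the uwsgi module is only importable inside a uWSGI worker (under uWSGI the values come back as bytes):

from werkzeug.contrib.cache import SimpleCache
from isso.core import Cache, uWSGICache

try:
    import uwsgi                  # only available when running under uWSGI
    backend = uWSGICache          # classmethods talk to the "hash" cache2 section
except ImportError:
    backend = Cache(SimpleCache(threshold=1024, default_timeout=3600))

backend.set('hash', 'user@example.org', 'cafe42')
print(backend.get('hash', 'user@example.org'))
backend.delete('hash', 'user@example.org')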
isso/views/comments.py
@@ -166,6 +166,7 @@ def single(app, environ, request, id):
         for key in set(rv.keys()) - FIELDS:
             rv.pop(key)

+        app.cache.delete('hash', id)
         logger.info('comment %i edited: %s', id, json.dumps(rv))

         checksum = hashlib.md5(rv["text"].encode('utf-8')).hexdigest()

@@ -182,6 +183,7 @@ def single(app, environ, request, id):
         for key in set(rv.keys()) - FIELDS:
             rv.pop(key)

+        app.cache.delete('hash', id)
         logger.info('comment %i deleted', id)

         resp = Response(json.dumps(rv), 200, content_type='application/json')

@@ -198,7 +200,14 @@ def fetch(app, environ, request, uri):

     for item in rv:

-        item['hash'] = str(pbkdf2(item['email'] or item['remote_addr'], app.salt, 1000, 6))
+        key = item['email'] or item['remote_addr']
+        val = app.cache.get('hash', key)
+
+        if val is None:
+            val = str(pbkdf2(key, app.salt, 1000, 6))
+            app.cache.set('hash', key, val)
+
+        item['hash'] = val

         for key in set(item.keys()) - FIELDS:
             item.pop(key)