Skip to content

Instantly share code, notes, and snippets.

@lxneng
Forked from cloverstd/cache.py
Last active May 27, 2016 03:24
Show Gist options
  • Save lxneng/cc3b5330382a0351ab6c to your computer and use it in GitHub Desktop.
# coding: utf-8
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import hashlib
sha1 = hashlib.sha1
except ImportError:
import sha
sha1 = sha.new
import functools
def cache(expires=7200):
    """Decorator for Tornado handler methods: tag the handler with a cache
    TTL in seconds, which CacheMixin.write() uses when storing the response.
    """
    def decorator(method):
        @functools.wraps(method)
        def wrapped(handler, *args, **kwargs):
            # Record the TTL on the handler before running the real method.
            handler.expires = expires
            return method(handler, *args, **kwargs)
        return wrapped
    return decorator
class CacheMixin(object):
    """Mixin for tornado RequestHandlers that caches whole responses.

    In prepare(), a cached response (keyed by request path + arguments) is
    served directly and the request is finished; otherwise write() stores
    each chunk in the application's cache backend before writing it out.
    """

    @property
    def cache(self):
        # The cache backend is shared application-wide.
        return self.application.cache

    def prepare(self):
        # BUG FIX: removed leftover Python-2 debug statement `print key`.
        super(CacheMixin, self).prepare()
        key = self._generate_key(self.request)
        if self.cache.exists(self._prefix(key)):
            rv = pickle.loads(self.cache.get(self._prefix(key)))
            self.write_cache(rv)
            self.finish()

    def _generate_key(self, request):
        # Key on path + query/body arguments so distinct requests cache apart.
        key = pickle.dumps((request.path, request.arguments))
        return sha1(key).hexdigest()

    def _prefix(self, key):
        return "Cache:%s" % key

    def write_cache(self, chunk):
        # Bypass the caching write() so cached replays are not re-stored.
        super(CacheMixin, self).write(chunk)

    def write(self, chunk):
        pickled = pickle.dumps(chunk)
        key = self._generate_key(self.request)
        if hasattr(self, "expires"):
            self.cache.set(self._prefix(key), pickled, self.expires)
        else:
            self.cache.set(self._prefix(key), pickled)
        super(CacheMixin, self).write(chunk)
class CacheBackend(object):
    """Abstract interface that every cache backend must implement."""

    def get(self, key):
        """Return the stored value for *key*, or None when absent."""
        raise NotImplementedError

    def set(self, key, value, timeout):
        """Store *value* under *key* with the given expiry in seconds."""
        raise NotImplementedError

    def delitem(self, key):
        """Remove *key* from the cache."""
        raise NotImplementedError

    def exists(self, key):
        """Return True when *key* is present in the cache."""
        raise NotImplementedError
class RedisCacheBackend(CacheBackend):
    """Cache backend backed by a redis connection.

    :param redis_connection: a redis.Redis-compatible client
    :param options: overrides; ``timeout`` is the default expiry (86400 s)
    """

    def __init__(self, redis_connection, **options):
        self.options = dict(timeout=86400)
        self.options.update(options)
        self.redis = redis_connection

    def get(self, key):
        # Missing keys yield None instead of raising.
        if self.exists(key):
            return self.redis.get(key)
        return None

    def set(self, key, value, timeout=None):
        self.redis.set(key, value)
        # Always attach an expiry so entries cannot live forever.
        if timeout:
            self.redis.expire(key, timeout)
        else:
            self.redis.expire(key, self.options["timeout"])

    def delitem(self, key):
        self.redis.delete(key)

    def exists(self, key):
        # BUG FIX: removed leftover Python-2 debug statement `print key`.
        return bool(self.redis.exists(key))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tornado.web
import tornado.ioloop
from cache import RedisCacheBackend, CacheMixin
import redis
class CacheHandler(CacheMixin, tornado.web.RequestHandler):
    """Demo handler whose response is cached for 60 seconds."""

    def get(self):
        # TTL picked up by CacheMixin.write when storing the chunk.
        self.expires = 60
        self.write("test")
class Application(tornado.web.Application):
    """Tornado app that wires a shared redis-backed cache onto self.cache."""

    def __init__(self):
        self.redis = redis.Redis()
        self.cache = RedisCacheBackend(self.redis)
        routes = [(r'/', CacheHandler)]
        super(Application, self).__init__(handlers=routes, debug=True)
if __name__ == '__main__':
    # Start the demo server on port 8080.
    app = Application()
    app.listen(8080)
    tornado.ioloop.IOLoop.instance().start()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tornado.web
import tornado.ioloop
from cache import RedisCacheBackend, CacheMixin, cache
import redis
class BaseHandler(CacheMixin, tornado.web.RequestHandler):
    """Shared base: mixes response caching into Tornado's RequestHandler.

    BUG FIX: the original subclassed a bare ``RequestHandler``, a name this
    script never imports (only ``tornado.web`` is imported).
    """

    def prepare(self):
        super(BaseHandler, self).prepare()
class Greetandler(BaseHandler):
    """Demo handler using the @cache decorator for a 60-second TTL.

    BUG FIX: the original subclassed ``Cornado.web.RequestHandler`` (typo
    for tornado) and bypassed BaseHandler, so CacheMixin never ran and the
    @cache TTL had no effect; inheriting BaseHandler enables the caching.
    """

    @cache(60)  # set the cache expires
    def get(self):
        self.write("test")
class Application(tornado.web.Application):
    """Tornado app for the @cache decorator demo.

    BUG FIX: the route pointed at ``CacheHandler``, which belongs to the
    previous example; this script defines ``Greetandler``.
    """

    def __init__(self):
        settings = dict(debug=True)
        self.redis = redis.Redis()
        self.cache = RedisCacheBackend(self.redis)
        handlers = [(r'/', Greetandler)]
        super(Application, self).__init__(handlers=handlers, **settings)
if __name__ == '__main__':
    # Launch the decorator demo server on port 8080.
    app = Application()
    app.listen(8080)
    tornado.ioloop.IOLoop.instance().start()
@lxneng
Copy link
Author

lxneng commented May 27, 2016

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import pickle
from functools import (
    wraps,
    _CacheInfo,
    RLock,
    lru_cache as mem_lru_cache
)

import redis
import hashlib


class RedisCacheBackend(object):
    """Cache backend backed by a redis connection.

    ``timeout`` in *options* sets the default expiry (defaults to one day).
    """

    def __init__(self, redis_connection, **options):
        self.options = dict(timeout=86400)
        self.options.update(options)
        self.redis = redis_connection

    def get(self, key):
        # Missing keys yield None instead of raising.
        return self.redis.get(key) if self.exists(key) else None

    def set(self, key, value, timeout=None):
        self.redis.set(key, value)
        # Every entry gets an expiry; fall back to the configured default.
        self.redis.expire(key, timeout if timeout else self.options["timeout"])

    def delitem(self, key):
        self.redis.delete(key)

    def exists(self, key):
        return bool(self.redis.exists(key))


class cached_property_redis(object):
    """Property descriptor whose computed value is memoized in redis.

    NOTE(review): the cache key is derived from repr(obj); with the default
    object repr (which embeds the instance address) keys differ between
    instances and processes — confirm users define a stable __repr__.
    """

    def __init__(self, expires=None, cache_backend=None):
        self.expires = expires
        self.cache = (RedisCacheBackend(redis.Redis())
                      if cache_backend is None else cache_backend)

    def __call__(self, func):
        # Used as @cached_property_redis(...): capture the getter function.
        self.func = func
        return self

    def __get__(self, obj, cls):
        if obj is None:
            # Class-level access returns the descriptor itself.
            return self

        digest = hashlib.md5("{}.{}".format(
            repr(obj), self.func.__name__).encode()).hexdigest()
        cache_key = "cached_property:{}".format(digest)
        if self.cache.exists(cache_key):
            return pickle.loads(self.cache.get(cache_key))
        result = self.func(obj)
        self.cache.set(cache_key, pickle.dumps(result), self.expires)
        return result


class cached_method_redis(object):
    """Decorator memoizing a function's pickled result in redis.

    :param cache_key: explicit key; when omitted one is derived from the
        function name and call arguments
    :param expires: TTL forwarded to the backend's set()
    :param cache_backend: backend instance; defaults to a local redis
    """

    def __init__(self, cache_key=None, expires=None, cache_backend=None):
        self.expires = expires
        self.cache_key = cache_key
        self.cache = (RedisCacheBackend(redis.Redis())
                      if cache_backend is None else cache_backend)

    def __call__(self, func):
        @wraps(func)
        def decorated(*args, **kwargs):
            if self.cache_key:
                key = self.cache_key
            else:
                digest = hashlib.md5("{}.{}.{}".format(
                    func.__name__, args, kwargs).encode()).hexdigest()
                key = "cached_func:{}".format(digest)
            if self.cache.exists(key):
                return pickle.loads(self.cache.get(key))
            result = func(*args, **kwargs)
            self.cache.set(key, pickle.dumps(result), self.expires)
            return result
        return decorated


class cached_property(object):
    """A property computed once per instance, then replaced by an ordinary
    attribute.  Deleting the attribute resets the property.

    Credit to Marcel Hellkamp, author of bottle.py.
    copy from https://github.com/bottlepy/bottle/blob/master/bottle.py#L183
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            return self
        # Storing into obj.__dict__ shadows this (non-data) descriptor:
        # subsequent lookups hit the instance attribute directly.
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result


def lru_cache(maxsize=100, conn=None):
    """
    Simple Redis-based LRU cache decorator

    * will fall back to functools.lru_cache when redis conn not provided

    :param conn: redis connection
    :param maxsize: maximum number of entries in LRU cache

    Usage::

    >>> @redis_lru(maxsize=10000, conn=redis.Redis())
    ... def func(foo, bar):
    ...     # some expensive operation
    ...     return baz

    Uses 4 Redis keys, all suffixed with the function name:
        lru:keys: - sorted set, stores hash keys
        lru:vals: - hash, stores function output values
        lru:hits: - string, stores hit counter
        lru:miss: - string, stores miss counter

    NOTE(review): zadd(name, key, score) / zincrby(name, key, amount) use
    the redis-py 2.x argument order; redis-py >= 3.0 changed both
    signatures — confirm the deployed client version.
    """
    def decorator(func):
        cache_keys = "lru:keys:%s" % (func.__name__,)
        cache_vals = "lru:vals:%s" % (func.__name__,)
        cache_hits = "lru:hits:%s" % (func.__name__,)
        cache_miss = "lru:miss:%s" % (func.__name__,)

        lock = RLock()
        lvars = [conn]      # closure mutable: cache_init() may swap the conn
        _MISSING = object()  # sentinel distinguishing "not cached" from falsy

        # While no redis connection is specified, fall back to functools's
        # lru_cache
        if conn is None:
            mlru = mem_lru_cache(maxsize=maxsize)(func)

        def add(key, value):
            # Record a miss and store the new value, evicting first if full.
            eject()
            conn = lvars[0]
            conn.incr(cache_miss)
            conn.hset(cache_vals, key, pickle.dumps(value))
            conn.zadd(cache_keys, key, 0)
            return value

        def get(key):
            # Return the cached value, or _MISSING on a cache miss; a hit
            # bumps the hit counter and the key's recency score.
            conn = lvars[0]
            value = conn.hget(cache_vals, key)
            if value is None:
                return _MISSING
            conn.incr(cache_hits)
            conn.zincrby(cache_keys, key, 1.0)
            return pickle.loads(value)

        def eject():
            # Evict up to ~10% of the least recently used keys when full.
            # (Renamed the local from `eject`, which shadowed this function.)
            conn = lvars[0]
            count = min((maxsize // 10) or 1, 1000)
            if conn.zcard(cache_keys) >= maxsize:
                victims = conn.zrange(cache_keys, 0, count)
                conn.zremrangebyrank(cache_keys, 0, count)
                conn.hdel(cache_vals, *victims)

        @wraps(func)
        def wrapper(*args, **kwargs):
            conn = lvars[0]
            if conn:
                items = args + tuple(sorted(kwargs.items()))
                key = pickle.dumps(items)
                value = get(key)
                if value is _MISSING:
                    # BUG FIX: the original `get(key) or add(...)` recomputed
                    # every falsy cached result (0, '', None, False) and
                    # double-counted it as a miss on each call.
                    value = add(key, func(*args, **kwargs))
                return value
            else:
                return mlru(*args, **kwargs)

        def cache_info():
            # Mirror functools.lru_cache's cache_info() named tuple.
            with lock:
                conn = lvars[0]
                if conn:
                    size = int(conn.zcard(cache_keys) or 0)
                    hits = int(conn.get(cache_hits) or 0)
                    misses = int(conn.get(cache_miss) or 0)
                    return _CacheInfo(hits, misses, maxsize, size)
                else:
                    return mlru.cache_info()

        def cache_clear():
            # Drop all four redis keys (or clear the in-memory fallback).
            with lock:
                conn = lvars[0]
                if conn:
                    conn.delete(cache_keys, cache_vals)
                    conn.delete(cache_hits, cache_miss)
                else:
                    return mlru.cache_clear()

        def cache_init(conn):
            # Attach/replace the redis connection after decoration.
            # NOTE(review): if decorated with a live conn, cache_init(None)
            # would later hit an undefined `mlru` — confirm intended usage.
            lvars[0] = conn

        wrapper.cache_init = cache_init
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return wrapper
    return decorator


# Module-level store shared by all mem_cache-decorated functions:
# maps pickled [func_name, args, kwargs] -> (timestamp, value).
CACHE = {}


def mem_cache(timeout=300):
    """In-memory memoization decorator with optional time-based expiry.

    :param timeout: seconds a cached value stays fresh; a falsy value
        disables expiry entirely (entries are cached forever)
    """
    count = 0
    owned_keys = set()  # keys written via this decorator; scopes the sweep

    def deco1(func):
        @wraps(func)
        def deco2(*args, **kwargs):
            nonlocal count
            if timeout:
                count += 1
                if count % 100 == 0:
                    # Periodically evict outdated values.  BUG FIX: the
                    # original swept the entire shared CACHE using *this*
                    # decorator's timeout, expiring entries that belong to
                    # other decorated functions with longer timeouts.
                    deadline = time.time() - timeout
                    stale = [k for k in owned_keys
                             if k in CACHE and CACHE[k][0] < deadline]
                    for k in stale:
                        del CACHE[k]
                        owned_keys.discard(k)

            key = pickle.dumps([func.__name__, args, kwargs])
            if key not in CACHE or \
                    (timeout and CACHE[key][0] < time.time() - timeout):
                value = func(*args, **kwargs)
                CACHE[key] = (time.time(), value)
            owned_keys.add(key)
            return CACHE[key][1]
        return deco2
    return deco1

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment