@mbox
Last active September 3, 2019 16:14
Two-level cache for Django
To use the two-layer cache:
* Run memcached on each Django webserver
* Add "LOCAL_CACHE_ADDR = ('127.0.0.1:11211',)" to settings.py (a fuller settings sketch with example values is shown just below this list)
* Replace "from django.core.cache import cache" with "from dualcache import cache" everywhere you want to use the two-layer cache. It can be freely mixed and matched with Django's default caching.
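For reference, the settings fragment below is a minimal sketch with assumed example values: LOCAL_CACHE_LIFETIME is optional and defaults to 1 second, and the shared (L2) cache is simply whatever Django's normal cache settings point at (shown here in the Django 1.3+ CACHES form with a placeholder hostname).

# settings.py -- assumed example values; adjust addresses and lifetimes for your deployment
LOCAL_CACHE_ADDR = ('127.0.0.1:11211',)   # memcached running on this webserver (the L1 cache)
LOCAL_CACHE_LIFETIME = 5                  # maximum staleness of L1 data, in seconds (defaults to 1)

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'LOCATION': 'shared-cache.example.com:11211',   # the shared (L2) cache
    },
}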
# Provide a local read-only cache on each Django webserver, with cache misses and writes going to
# the standard Django cache.
# (c) 2011 Malcolm Box
# Released under the same license as Django

from django.core import signals
from django.conf import settings
from django.core.cache import cache as remote_cache
from django.core.cache.backends.base import BaseCache
from django.utils.encoding import smart_str

import memcache

# A memcached instance running on this webserver acts as the local (L1) cache.
local_cache = memcache.Client(settings.LOCAL_CACHE_ADDR)

# Maximum staleness, in seconds, of data held in the local cache.
local_lifetime = getattr(settings, 'LOCAL_CACHE_LIFETIME', 1)
class TwoLayerCache(BaseCache):
    """Implements a local read-only cache of values.

    The local cache is a memcached instance that acts as an L1 cache for an individual
    web server. The standard Django cache backend acts as the L2 cache shared between
    multiple web servers.

    On a cache read, if the key is not found in the local cache, it is fetched from the
    L2 cache and stored locally. Any write operation (set, add, incr, decr) invalidates
    the key in the L1 cache and passes the operation to the L2 cache.

    The lifetime of objects in the L1 cache is controlled by settings.LOCAL_CACHE_LIFETIME.
    Data in the L1 cache may be up to this many seconds out of date, and applications must
    be able to tolerate this.
    """

    def __init__(self):
        super(TwoLayerCache, self).__init__(params={'timeout': 1})
    def get(self, key, default=None):
        """Try the local (L1) cache first, then fall back to the remote (L2) cache."""
        val = local_cache.get(smart_str(key))
        if val is None:
            val = remote_cache.get(smart_str(key))
            if val is not None:
                # Store remotely-fetched values locally so subsequent reads stay on-box.
                local_cache.set(smart_str(key), val, local_lifetime)
        if val is None:
            val = default
        return val
    def set(self, key, value):
        local_cache.delete(smart_str(key))
        return remote_cache.set(smart_str(key), value)

    def add(self, key, value, *args):
        local_cache.delete(smart_str(key))
        return remote_cache.add(smart_str(key), value, *args)

    def incr(self, key, *args, **kwargs):
        local_cache.delete(smart_str(key))
        return remote_cache.incr(smart_str(key), *args, **kwargs)

    def decr(self, key, *args, **kwargs):
        local_cache.delete(smart_str(key))
        return remote_cache.decr(smart_str(key), *args, **kwargs)

    def delete(self, key, *args, **kwargs):
        local_cache.delete(smart_str(key))
        return remote_cache.delete(smart_str(key), *args, **kwargs)
    def get_many(self, keys):
        values = local_cache.get_multi(map(smart_str, keys))
        if len(values) == len(keys):
            return values
        else:
            # At least one key missed locally: fetch the whole set from the L2
            # cache and refresh the local copies.
            r_values = remote_cache.get_many(map(smart_str, keys))
            local_cache.set_multi(r_values, time=local_lifetime)
            return r_values
    def _get_local(self, key):
        """Read a key from the local (L1) cache only; used by the tests."""
        return local_cache.get(smart_str(key))

    def close(self, *args, **kwargs):
        local_cache.disconnect_all()


cache = TwoLayerCache()

# Tidy up at the end of each request, as the Django memcached backend does.
signals.request_finished.connect(cache.close)
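For context, here is a hedged sketch of how view code might consume the module above; the view name, cache key, and compute_cheese_stock() helper are hypothetical and not part of the gist.

# Hypothetical usage of dualcache from view code (illustration only).
from django.http import HttpResponse

from dualcache import cache


def compute_cheese_stock():
    # Stand-in for an expensive query or computation (hypothetical).
    return 42


def cheese_stock(request):
    # Reads hit the local (L1) memcached first; on a miss the shared Django
    # (L2) cache is consulted and the value is kept locally for up to
    # LOCAL_CACHE_LIFETIME seconds.
    stock = cache.get('cheese_stock')
    if stock is None:
        stock = compute_cheese_stock()
        # Writes invalidate the L1 copy and go straight to the L2 cache.
        cache.set('cheese_stock', stock)
    return HttpResponse(str(stock))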
# Tests for the two-layer cache.
import time

from django.conf import settings
from django.core.cache import cache as remote_cache
from django.test import TestCase
from dualcache import cache


class DualCacheTest(TestCase):
    def setUp(self):
        keys = ['test', '1', '2', '3', '4', 'wensleydale', 'key']
        for k in keys:
            cache.delete(k)
            remote_cache.delete(k)
    def test_read(self):
        key = "test"
        remote_cache.set(key, 100)
        self.assertEquals(cache.get(key), 100)
        self.assertEquals(cache._get_local(key), 100)
        cache.set(key, 200)
        self.assertEquals(cache.get(key), 200)
        # A write made directly to the L2 cache is not visible until the
        # locally cached value expires.
        remote_cache.set(key, 300)
        self.assertEquals(cache.get(key), 200)
        time.sleep(settings.LOCAL_CACHE_LIFETIME + 1)
        self.assertEquals(cache.get(key), 300)
    def test_multi_read(self):
        keys = ['1', '2', '3']
        remote_cache.set('1', 'one')
        remote_cache.set('2', 'two')
        values = cache.get_many(keys)
        self.assertEquals(len(values), 2)
        self.assertEquals(values['1'], 'one')
        self.assertEquals(values['2'], 'two')
        cache.set('3', 'three')
        values = cache.get_many(keys)
        self.assertEquals(set(values.keys()), set(['1', '2', '3']))
        # A delete made directly on the L2 cache leaves the stale value
        # visible from the L1 cache.
        remote_cache.delete('1')
        values = cache.get_many(keys)
        self.assertEquals(set(values.keys()), set(['1', '2', '3']))
        cache.delete('1')
        values = cache.get_many(keys)
        self.assertEquals(set(values.keys()), set(['2', '3']))
        # Check that a missing value causes an update from L2.
        remote_cache.set('4', 'four')
        values = cache.get_many(keys + ['4'])
        self.assertEquals(len(values), 3)
        self.assertEquals(cache._get_local('4'), 'four')
    def test_interface(self):
        key = "wensleydale"
        cache.set(key, "yummy")
        self.assertEquals(cache.get(key), "yummy")
        cache.set(key, 10)
        cache.incr(key, 10)
        self.assertEquals(cache.get(key), 20)
        cache.delete(key)
        self.assertEquals(cache.get(key), None)
        added = cache.add(key, 'new trousers')
        self.assertEquals(added, True)
        self.assertEquals(cache.get(key), 'new trousers')
        added = cache.add(key, 'old trousers')
        self.assertEquals(added, False)
        self.assertEquals(cache.get(key), 'new trousers')
    def test_incr(self):
        cache.set('key', 100)
        cache.incr('key')
        self.assertEquals(cache.get('key'), 101)
        cache.incr('key', 25)
        self.assertEquals(cache.get('key'), 126)
        try:
            ret = cache.incr('no-such-key')
            self.assertEquals(ret, None)
        except ValueError:
            # Some cache backends raise, some don't
            pass