June 7th 2010

Distributed locking with Python and Redis

This is already possible with Memcached, but if you are already drinking the Kool-Aid...

import time

class Lock(object):
    def __init__(self, key, expires=60, timeout=10):
        """
        Distributed locking using Redis SETNX and GETSET.

        Usage::

            with Lock('my_lock'):
                print "Critical section"

        :param  expires     We consider any existing lock older than
                            ``expires`` seconds to be invalid in order to
                            detect crashed clients. This value must be higher
                            than it takes the critical section to execute.
        :param  timeout     If another client has already obtained the lock,
                            sleep for a maximum of ``timeout`` seconds before
                            giving up. A value of 0 means we never wait.
        """
        self.key = key
        self.timeout = timeout
        self.expires = expires

    def __enter__(self):
        timeout = self.timeout
        while timeout >= 0:
            # The value stored under the key is the time at which the lock
            # expires; the extra second is slack for clock skew between
            # clients.
            expires = time.time() + self.expires + 1

            if redis.setnx(self.key, expires):
                # We gained the lock; enter critical section
                return

            current_value = redis.get(self.key)

            # We found an expired lock and nobody raced us to replacing it.
            # GETSET returns the previous value atomically, so only one
            # client sees the old (expired) timestamp and wins.
            if current_value and float(current_value) < time.time() and \
                redis.getset(self.key, expires) == current_value:
                    return

            timeout -= 1
            time.sleep(1)

        raise LockTimeout("Timeout whilst waiting for lock")

    def __exit__(self, exc_type, exc_value, traceback):
        # NOTE(review): this deletes unconditionally; if the critical section
        # overran ``expires`` another client may now hold the lock and this
        # would delete *their* key — keep ``expires`` generously large.
        redis.delete(self.key)

class LockTimeout(BaseException):
    """Raised when the lock could not be acquired within ``timeout`` seconds.

    Deliberately inherits from ``BaseException`` rather than ``Exception``
    so that an over-broad ``except Exception`` inside a critical section
    cannot accidentally swallow a lock-acquisition failure.
    """

One common use case for distributed locks in web applications is to prevent clients dog-piling onto an expensive cache key:

def cache_without_dogpiling(key, cb, cache_expiry=None, *args, **kwargs):
    """
    Return the cached value for ``key``, computing it with ``cb`` on a miss.

    A distributed ``Lock`` around the recomputation ensures that only one
    client calls ``cb`` when the key is cold; extra positional and keyword
    arguments are forwarded to ``Lock``.
    """
    cached = cache.get(key)
    if cached is not None:
        return cached

    # Cache miss; serialise the expensive recomputation behind the lock
    with Lock(key, *args, **kwargs):
        # Re-check under the lock - another client may have filled the
        # cache while we were waiting to acquire it
        cached = cache.get(key)
        if cached is not None:
            return cached

        result = cb()
        cache.set(key, result, cache_expiry)
        return result

def slow():
    """Toy 'expensive' callable used to demonstrate dog-pile prevention.

    Prints a marker so the transcript shows how many times it actually ran,
    then returns 2.
    """
    # Parenthesised single-argument print behaves identically under
    # Python 2 (prints the string, not a tuple) and is valid Python 3.
    print("Inside slow()")
    return 1 + 1 # Python is slow

>>> cache_without_dogpiling('my_key', slow, 60 * 10)
Inside slow()
2
>>> cache_without_dogpiling('my_key', slow, 60 * 10)
2

Note that ``slow()`` runs only once - the second call is served straight from the cache.
As a bonus, if you don't want your test or development environment to rely on Redis, you can replace it with a no-op lock:

import contextlib

@contextlib.contextmanager
def Lock(*args, **kwargs):
    """No-op drop-in replacement for the Redis-backed ``Lock``.

    Accepts (and ignores) the same arguments, so test and development
    environments need no running Redis server.
    """
    yield

You can subscribe to new posts via email or RSS.