
"""
Memcached Backends
------------------

Provides backends for talking to memcached.

"""


from dogpile.cache.api import CacheBackend, NO_VALUE
from dogpile.cache import util
import random
import time

__all__ = 'GenericMemcachedBackend', 'MemcachedBackend', \
    'PylibmcBackend', 'BMemcachedBackend', 'MemcachedLock'

class MemcachedLock(object):
    """Simple distributed lock using memcached.

    This is an adaptation of the lock featured at


    def __init__(self, client_fn, key):
        self.client_fn = client_fn
        self.key = "_lock" + key

    def acquire(self, wait=True):
        client = self.client_fn()
        i = 0
        while True:
            if client.add(self.key, 1):
                return True
            elif not wait:
                return False
            else:
                sleep_time = (((i + 1) * random.random()) + 2 ** i) / 2.5
                time.sleep(sleep_time)
            if i < 15:
                i += 1

    def release(self):
        client = self.client_fn()
        client.delete(self.key)
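
# A minimal usage sketch for MemcachedLock (illustrative only; ``client``
# here stands in for any memcached client object exposing ``add()`` and
# ``delete()``).  Within dogpile, this lock is normally created via
# GenericMemcachedBackend.get_mutex() when ``distributed_lock`` is enabled:
#
#     lock = MemcachedLock(lambda: client, "some_key")
#     if lock.acquire():
#         try:
#             pass  # regenerate the cached value here
#         finally:
#             lock.release()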

class GenericMemcachedBackend(CacheBackend):
    """Base class for memcached backends.

    This base class accepts a number of parameters
    common to all backends.

    :param url: the string URL to connect to.  Can be a single
     string or a list of strings.  This is the only argument
     that's required.
    :param distributed_lock: boolean, when True, will use a
     memcached-lock as the dogpile lock (see :class:`.MemcachedLock`).
     Use this when multiple
     processes will be talking to the same memcached instance.
     When left at False, dogpile will coordinate on a regular
     threading mutex.
    :param memcached_expire_time: integer, when present will
     be passed as the ``time`` parameter to ``pylibmc.Client.set``.
     This is used to set the memcached expiry time for a value.

     .. note::

         This parameter is **different** from Dogpile's own 
         ``expiration_time``, which is the number of seconds after
         which Dogpile will consider the value to be expired. 
         When Dogpile considers a value to be expired, 
         it **continues to use the value** until generation
         of a new value is complete, when using
         :meth:`.CacheRegion.get_or_create`.
         Therefore, if you are setting ``memcached_expire_time``, you'll
         want to make sure it is greater than ``expiration_time`` 
         by at least enough seconds for new values to be generated,
         else the value won't be available during a regeneration, 
         forcing all threads to wait for a regeneration each time 
         a value expires.
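
    For example, assuming the pylibmc backend (any of the concrete
    backends below works the same way; the values shown are purely
    illustrative), a region can be configured so that the memcached
    TTL exceeds dogpile's own expiration time and so that the
    distributed lock is used::

        from dogpile.cache import make_region

        region = make_region().configure(
            'dogpile.cache.pylibmc',
            expiration_time = 600,
            arguments = {
                'url':["127.0.0.1"],
                'distributed_lock':True,
                'memcached_expire_time':660
            }
        )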

    The :class:`.GenericMemcachedBackend` uses a ``threading.local()``
    object to store individual client objects per thread,
    as most modern memcached clients do not appear to be inherently
    threadsafe.

    In particular, ``threading.local()`` has the advantage over pylibmc's
    built-in thread pool in that it automatically discards objects
    associated with a particular thread when that thread ends.

    """

    set_arguments = {}
    """Additional arguments which will be passed
    to the :meth:`set` method."""
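    # Subclasses populate set_arguments with keyword arguments for the
    # client's set() call; e.g. PylibmcBackend below adds "time" and
    # "min_compress_len", which set() passes along via **self.set_arguments.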

    def __init__(self, arguments):
        self._imports()
        # using a plain threading.local here.   threading.local
        # automatically deletes the __dict__ when a thread ends,
        # so the idea is that this is superior to pylibmc's
        # own ThreadMappedPool which doesn't handle this 
        # automatically.
        self.url = util.to_list(arguments['url'])
        self.distributed_lock = arguments.get('distributed_lock', False)
        self.memcached_expire_time = arguments.get(
                                        'memcached_expire_time', 0)

    def _imports(self):
        """client library imports go here."""
        raise NotImplementedError()

    def _create_client(self):
        """Creation of a Client instance goes here."""
        raise NotImplementedError()

    @util.memoized_property
    def _clients(self):
        backend = self
        class ClientPool(util.threading.local):
            def __init__(self):
                self.memcached = backend._create_client()

        return ClientPool()

    @property
    def client(self):
        """Return the memcached client.

        This uses a threading.local by
        default as it appears most modern
        memcached libs aren't inherently
        threadsafe.

        """
        return self._clients.memcached

    def get_mutex(self, key):
        if self.distributed_lock:
            return MemcachedLock(lambda: self.client, key)
        else:
            return None

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            return NO_VALUE
        else:
            return value

    def set(self, key, value):
        self.client.set(key, value, **self.set_arguments)

    def delete(self, key):
        self.client.delete(key)

class PylibmcBackend(GenericMemcachedBackend):
    """A backend for the 
    `pylibmc <>`_ 
    memcached client.

    A configuration illustrating several of the optional
    arguments described in the pylibmc documentation::

        from dogpile.cache import make_region

        region = make_region().configure(
            'dogpile.cache.pylibmc',
            expiration_time = 3600,
            arguments = {
                'url':["127.0.0.1"],
                'binary':True,
                'behaviors':{"tcp_nodelay": True,"ketama":True}
            }
        )

    Arguments accepted here include those of 
    :class:`.GenericMemcachedBackend`, as well as 
    those below.

    :param binary: sets the ``binary`` flag understood by
     ``pylibmc.Client``.
    :param behaviors: a dictionary which will be passed to
     ``pylibmc.Client`` as the ``behaviors`` parameter.
    :param min_compress_len: Integer, will be passed as the
     ``min_compress_len`` parameter to the ``pylibmc.Client.set``
     method.

    """

    def __init__(self, arguments):
        self.binary = arguments.get('binary', False)
        self.behaviors = arguments.get('behaviors', {})
        self.min_compress_len = arguments.get('min_compress_len', 0)

        self.set_arguments = {}
        if "memcached_expire_time" in arguments:
            self.set_arguments["time"] = \
                    arguments["memcached_expire_time"]
        if "min_compress_len" in arguments:
            self.set_arguments["min_compress_len"] = \
                    arguments["min_compress_len"]
        super(PylibmcBackend, self).__init__(arguments)

    def _imports(self):
        global pylibmc
        import pylibmc

    def _create_client(self):
        return pylibmc.Client(self.url,
                        binary=self.binary,
                        behaviors=self.behaviors
                    )

class MemcachedBackend(GenericMemcachedBackend):
    """A backend using the standard `Python-memcached <>`_
    library.

    Example::

        from dogpile.cache import make_region

        region = make_region().configure(
            'dogpile.cache.memcached',
            expiration_time = 3600,
            arguments = {
                'url':"127.0.0.1:11211"
            }
        )

    """

    def _imports(self):
        global memcache
        import memcache

    def _create_client(self):
        return memcache.Client(self.url)

class BMemcachedBackend(GenericMemcachedBackend):
    """A backend for the 
    `python-binary-memcached <>`_ 
    memcached client.

    This is a pure Python memcached client which
    includes the ability to authenticate with a memcached
    server using SASL.

    A typical configuration using username/password::

        from dogpile.cache import make_region

        region = make_region().configure(
            'dogpile.cache.bmemcached',
            expiration_time = 3600,
            arguments = {
                'url':["127.0.0.1"],
                'username':'scott',
                'password':'tiger'
            }
        )

    Arguments which can be passed to the ``arguments`` 
    dictionary include:

    :param username: optional username, will be used for 
     SASL authentication.
    :param password: optional password, will be used for
     SASL authentication.

    """

    def __init__(self, arguments):
        self.username = arguments.get('username', None)
        self.password = arguments.get('password', None)
        super(BMemcachedBackend, self).__init__(arguments)

    def _imports(self):
        global bmemcached
        import bmemcached

        class RepairBMemcachedAPI(bmemcached.Client):
            """Repairs BMemcached's non-standard method
            signature for ``add()``, which raises ``ValueError``
            rather than returning ``False`` when a key already
            exists.

            """

            def add(self, key, value):
                try:
                    super(RepairBMemcachedAPI, self).add(key, value)
                    return True
                except ValueError:
                    return False

        self.Client = RepairBMemcachedAPI

    def _create_client(self):
        return self.Client(self.url,
                        username=self.username,
                        password=self.password
                    )