Source: dogpile.cache/dogpile/cache/backends/redis.py

"""
Redis Backends
------------------

Provides backends for talking to `Redis <http://redis.io>`_.

"""

from __future__ import absolute_import
from dogpile.cache.api import CacheBackend, NO_VALUE
from dogpile.cache.compat import pickle, u

redis = None

__all__ = ('RedisBackend',)


class RedisBackend(CacheBackend):
    """A `Redis <http://redis.io/>`_ backend, using the
    `redis-py <http://pypi.python.org/pypi/redis/>`_ backend.

    Example configuration::

        from dogpile.cache import make_region

        region = make_region().configure(
            'dogpile.cache.redis',
            arguments = {
                'host': 'localhost',
                'port': 6379,
                'db': 0,
                'redis_expiration_time': 60*60*2,   # 2 hours
                'distributed_lock': True
                }
        )

    Arguments accepted in the arguments dictionary:

    :param url: string. If provided, will override the separate host, port,
     db and password params.  The format is that accepted by
     ``StrictRedis.from_url()``; see the example following this list.

     .. versionadded:: 0.4.1

    :param host: string, default is ``localhost``.

    :param password: string, default is no password.

     .. versionadded:: 0.4.1

    :param port: integer, default is ``6379``.

    :param db: integer, default is ``0``.

    :param redis_expiration_time: integer, number of seconds after setting
     a value that Redis should expire it.  This should be larger than dogpile's
     cache expiration.  By default no expiration is set.

    :param distributed_lock: boolean, when True, will use a
     redis-lock as the dogpile lock.
     Use this when multiple
     processes will be talking to the same redis instance.
     When left at False, dogpile will coordinate on a regular
     threading mutex.

    :param lock_timeout: integer, number of seconds after acquiring a lock that
     Redis should expire it.  This argument is only valid when
     ``distributed_lock`` is ``True``.

     .. versionadded:: 0.5.0

    :param lock_sleep: number of seconds (may be fractional) to sleep
     between attempts to acquire the lock.  Defaults to ``0.1``.  This
     argument is only valid when ``distributed_lock`` is ``True``.

     .. versionadded:: 0.5.0
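
    A region may also be configured from a single connection URL, which is
    passed to ``StrictRedis.from_url()``.  The URL below, including its
    password, host, port and database number, is only an illustrative
    placeholder for your own deployment::

        region = make_region().configure(
            'dogpile.cache.redis',
            arguments = {
                'url': 'redis://:somepassword@localhost:6379/0',
                'redis_expiration_time': 60*60*2,   # 2 hours
                'distributed_lock': True
                }
        )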

    """

    def __init__(self, arguments):
        self._imports()
        self.url = arguments.pop('url', None)
        self.host = arguments.pop('host', 'localhost')
        self.password = arguments.pop('password', None)
        self.port = arguments.pop('port', 6379)
        self.db = arguments.pop('db', 0)
        self.distributed_lock = arguments.get('distributed_lock', False)

        self.lock_timeout = arguments.get('lock_timeout', None)
        self.lock_sleep = arguments.get('lock_sleep', 0.1)

        self.redis_expiration_time = arguments.pop('redis_expiration_time', 0)
        self.client = self._create_client()

    def _imports(self):
        # defer imports until backend is used
        global redis
        import redis

    def _create_client(self):
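        # a configured url takes precedence over the separate
        # host / password / port / db parameters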
        if self.url is not None:
            return redis.StrictRedis.from_url(url=self.url)
        else:
            return redis.StrictRedis(host=self.host, password=self.password,
                                     port=self.port, db=self.db)

    def get_mutex(self, key):
        if self.distributed_lock:
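            # prefix the key so the lock's Redis key doesn't collide
            # with the key that stores the cached value itself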
            return self.client.lock(u('_lock{}').format(key), self.lock_timeout,
                                    self.lock_sleep)
        else:
            return None

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            return NO_VALUE
        return pickle.loads(value)

    def get_multi(self, keys):
        values = self.client.mget(keys)
        return [pickle.loads(v) if v is not None else NO_VALUE
                for v in values]

    def set(self, key, value):
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time,
                              pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
        else:
            self.client.set(key, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))

    def set_multi(self, mapping):
        mapping = dict(
            (k, pickle.dumps(v, pickle.HIGHEST_PROTOCOL))
            for k, v in mapping.items()
        )

        if not self.redis_expiration_time:
            self.client.mset(mapping)
        else:
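            # SETEX has no multi-key form, so batch the per-key calls
            # through a pipeline to send them in a single round trip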
            pipe = self.client.pipeline()
            for key, value in mapping.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()

    def delete(self, key):
        self.client.delete(key)

    def delete_multi(self, keys):
        self.client.delete(*keys)
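
# Illustrative usage sketch: assumes a Redis server reachable at
# localhost:6379 and uses a hypothetical "user:1" cache key.
#
#     from dogpile.cache import make_region
#
#     region = make_region().configure(
#         'dogpile.cache.redis',
#         arguments={'host': 'localhost', 'port': 6379, 'db': 0}
#     )
#
#     region.set("user:1", {"name": "alice"})
#     assert region.get("user:1") == {"name": "alice"}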