Commits

Mike Bayer committed b4b1b54

- fix up cache decorator, tests
- add backends for python-memcached, python-binary-memcached

Comments (0)

Files changed (14)

docs/build/api.rst

 Backends
 ==========
 
-Memory Backend
---------------
 .. automodule:: dogpile.cache.backends.memory
     :members:
 
-Memcached Backends
-------------------
-
 .. automodule:: dogpile.cache.backends.memcached
     :members:
 
-
 Plugins
 ========
 
-Mako Plugin
------------
-
 .. automodule:: dogpile.cache.plugins.mako_cache
     :members:
 

docs/build/usage.rst

         }
     )
 
-    @region.cache_on_arguments
+    @region.cache_on_arguments()
     def load_user_info(user_id):
         return some_database.lookup_user_by_id(user_id)
 
 Above, we create a :class:`.CacheRegion` using the :func:`.make_region` function, then
 apply the backend configuration via the :meth:`.CacheRegion.configure` method, which returns the 
-region.  The name of the backend is the only required argument,
-in this case ``dogpile.cache.pylibmc``.
+region.  The name of the backend is the only argument required by :meth:`.CacheRegion.configure`
+itself, in this case ``dogpile.cache.pylibmc``.  However, in this specific case, the ``pylibmc`` 
+backend also requires that the URL of the memcached server be passed within the ``arguments`` dictionary.
 
 The configuration is separated into two sections.  Upon construction via :func:`.make_region`,
 the :class:`.CacheRegion` object is available, typically at module
 pickle or similar can be used on the tuple - the "metadata" portion will always
 be a small and easily serializable Python structure.
 
-.. _mako_plugin:
-
-Mako Integration
-================
-
-dogpile.cache includes a `Mako <http://www.makotemplates.org>`_ plugin that replaces `Beaker <http://beaker.groovie.org>`_ 
-as the cache backend.
-Setup a Mako template lookup using the "dogpile.cache" cache implementation
-and a region dictionary::
-
-    from dogpile.cache import make_region
-    from mako.lookup import TemplateLookup
-
-    my_regions = {
-        "local":make_region(
-                    "dogpile.cache.dbm", 
-                    expiration_time=360,
-                    arguments={"filename":"file.dbm"}
-                ),
-        "memcached":make_region(
-                    "dogpile.cache.pylibmc", 
-                    expiration_time=3600,
-                    arguments={"url":["127.0.0.1"]}
-                )
-    }
-
-    mako_lookup = TemplateLookup(
-        directories=["/myapp/templates"],
-        cache_impl="dogpile.cache",
-        cache_args={
-            'regions':my_regions
-        }
-    )
-
-To use the above configuration in a template, use the ``cached=True`` argument on any
-Mako tag which accepts it, in conjunction with the name of the desired region
-as the ``cache_region`` argument::
-
-    <%def name="mysection()" cached=True cache_region="memcached">
-        some content that's cached
-    </%def>

dogpile/cache/api.py

 class CachedValue(tuple):
     """Represent a value stored in the cache.
     
-    :class:.`CachedValue` is a two-tuple of
+    :class:`.CachedValue` is a two-tuple of
     ``(payload, metadata)``, where ``metadata``
     is dogpile.cache's tracking information (
     currently the creation time).  The metadata
     
     """
     payload = property(operator.itemgetter(0))
+    """Named accessor for the payload."""
+
     metadata = property(operator.itemgetter(1))
+    """Named accessor for the dogpile.cache metadata dictionary."""
 
     def __new__(cls, payload, metadata):
         return tuple.__new__(cls, (payload, metadata))

dogpile/cache/backends/__init__.py

 
 register_backend("dogpile.cache.dbm", "dogpile.cache.backends.dbm", "DBMBackend")
 register_backend("dogpile.cache.pylibmc", "dogpile.cache.backends.memcached", "PylibmcBackend")
+register_backend("dogpile.cache.bmemcached", "dogpile.cache.backends.memcached", "BMemcachedBackend")
+register_backend("dogpile.cache.memcached", "dogpile.cache.backends.memcached", "MemcachedBackend")
 register_backend("dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend")

dogpile/cache/backends/memcached.py

-"""Provides backends for talking to memcached."""
+"""
+Memcached Backends
+------------------
+
+Provides backends for talking to memcached.
+
+"""
 
 from dogpile.cache.api import CacheBackend, NO_VALUE
 from dogpile.cache import util
 
 class MemcachedLock(object):
     """Simple distributed lock using memcached.
-    
+
     This is an adaptation of the lock featured at
     http://amix.dk/blog/post/19386
-    
+
     """
 
     def __init__(self, client_fn, key):
         client = self.client_fn()
         client.delete(self.key)
 
-class PylibmcBackend(CacheBackend):
-    """A backend for the 
-    `pylibmc <http://sendapatch.se/projects/pylibmc/index.html>`_ 
-    memcached client.
-    
-    A configuration illustrating several of the optional
-    arguments described in the pylibmc documentation::
-    
-        from dogpile.cache import make_region
+class GenericMemcachedBackend(CacheBackend):
+    """Base class for memcached backends.
 
-        region = make_region().configure(
-            'dogpile.cache.pylibmc',
-            expiration_time = 3600,
-            arguments = {
-                'url':["127.0.0.1"],
-                'binary':True,
-                'behaviors':{"tcp_nodelay": True,"ketama":True}
-            }
-        )
-    
-    Arguments which can be passed to the ``arguments`` 
-    dictionary include:
-    
+    This base class accepts a number of parameters
+    common to all backends.
+
     :param url: the string URL to connect to.  Can be a single
      string or a list of strings.  This is the only argument
      that's required.
     :param distributed_lock: boolean, when True, will use a
-     memcached-lock as the dogpile lock (see :class:`.MemcachedLock`).   
+     memcached-lock as the dogpile lock (see :class:`.MemcachedLock`).
      Use this when multiple
      processes will be talking to the same memcached instance.
      When left at False, dogpile will coordinate on a regular
-     threading mutex.  
-    :param binary: sets the ``binary`` flag understood by
-     ``pylibmc.Client``.
-    :param behaviors: a dictionary which will be passed to
-     ``pylibmc.Client`` as the ``behaviors`` parameter.
+     threading mutex.
     :param memcached_expire_time: integer, when present will
      be passed as the ``time`` parameter to ``pylibmc.Client.set``.
      This is used to set the memcached expiry time for a value.
-     
+
      .. note::
 
          This parameter is **different** from Dogpile's own 
          forcing all threads to wait for a regeneration each time 
          a value expires.
 
-    :param min_compres_len: Integer, will be passed as the 
-     ``min_compress_len`` parameter to the ``pylibmc.Client.set``
-     method.
-     
-    The :class:`.PylibmcBackend` uses a ``threading.local()``
-    object to store individual ``pylibmc.Client`` objects per thread.
-    ``threading.local()`` has the advantage over pylibmc's built-in
-    thread pool in that it automatically discards objects associated
-    with a particular thread when that thread ends.
-    
+    The :class:`.GenericMemcachedBackend` uses a ``threading.local()``
+    object to store individual client objects per thread,
+    as most modern memcached clients do not appear to be inherently
+    threadsafe.
+
+    In particular, ``threading.local()`` has the advantage over pylibmc's 
+    built-in thread pool in that it automatically discards objects 
+    associated with a particular thread when that thread ends.
+
     """
 
+    set_arguments = {}
+    """Additional arguments which will be passed
+    to the :meth:`set` method."""
+
     def __init__(self, arguments):
         self._imports()
-        self.url = util.to_list(arguments['url'])
-        self.binary = arguments.get('binary', False)
-        self.distributed_lock = arguments.get('distributed_lock', False)
-        self.behaviors = arguments.get('behaviors', {})
-        self.memcached_expire_time = arguments.get(
-                                        'memcached_expire_time', 0)
-        self.min_compress_len = arguments.get('min_compress_len', 0)
-
-        self._pylibmc_set_args = {}
-        if "memcached_expire_time" in arguments:
-            self._pylibmc_set_args["time"] = \
-                            arguments["memcached_expire_time"]
-        if "min_compress_len" in arguments:
-            self._pylibmc_set_args["min_compress_len"] = \
-                            arguments["min_compress_len"]
-        backend = self
-
         # using a plain threading.local here.   threading.local
         # automatically deletes the __dict__ when a thread ends,
         # so the idea is that this is superior to pylibmc's
         # own ThreadMappedPool which doesn't handle this 
         # automatically.
+        self.url = util.to_list(arguments['url'])
+        self.distributed_lock = arguments.get('distributed_lock', False)
+        self.memcached_expire_time = arguments.get(
+                                        'memcached_expire_time', 0)
+
+        backend = self
         class ClientPool(util.threading.local):
             def __init__(self):
                 self.memcached = backend._create_client()
 
         self._clients = ClientPool()
 
+
+    def _imports(self):
+        """client library imports go here."""
+        raise NotImplementedError()
+
+    def _create_client(self):
+        """Creation of a Client instance goes here."""
+        raise NotImplementedError()
+
+    @property
+    def client(self):
+        """Return the memcached client.
+
+        This uses a threading.local by
+        default as it appears most modern
+        memcached libs aren't inherently
+        threadsafe.
+
+        """
+        return self._clients.memcached
+
     def get_mutex(self, key):
         if self.distributed_lock:
-            return MemcachedLock(lambda: self._clients.memcached, key)
+            return MemcachedLock(lambda: self.client, key)
         else:
             return None
 
+    def get(self, key):
+        value = self.client.get(key)
+        if value is None:
+            return NO_VALUE
+        else:
+            return value
+
+    def set(self, key, value):
+        self.client.set(key, 
+                            value, 
+                            **self.set_arguments
+                        )
+
+    def delete(self, key):
+        self.client.delete(key)
+
+class PylibmcBackend(GenericMemcachedBackend):
+    """A backend for the 
+    `pylibmc <http://sendapatch.se/projects/pylibmc/index.html>`_ 
+    memcached client.
+
+    A configuration illustrating several of the optional
+    arguments described in the pylibmc documentation::
+
+        from dogpile.cache import make_region
+
+        region = make_region().configure(
+            'dogpile.cache.pylibmc',
+            expiration_time = 3600,
+            arguments = {
+                'url':["127.0.0.1"],
+                'binary':True,
+                'behaviors':{"tcp_nodelay": True,"ketama":True}
+            }
+        )
+
+    Arguments accepted here include those of 
+    :class:`.GenericMemcachedBackend`, as well as 
+    those below.
+
+    :param binary: sets the ``binary`` flag understood by
+     ``pylibmc.Client``.
+    :param behaviors: a dictionary which will be passed to
+     ``pylibmc.Client`` as the ``behaviors`` parameter.
+    :param min_compress_len: Integer, will be passed as the 
+     ``min_compress_len`` parameter to the ``pylibmc.Client.set``
+     method.
+
+    """
+
+    def __init__(self, arguments):
+        self.binary = arguments.get('binary', False)
+        self.behaviors = arguments.get('behaviors', {})
+        self.min_compress_len = arguments.get('min_compress_len', 0)
+
+        self.set_arguments = {}
+        if "memcached_expire_time" in arguments:
+            self.set_arguments["time"] = \
+                            arguments["memcached_expire_time"]
+        if "min_compress_len" in arguments:
+            self.set_arguments["min_compress_len"] = \
+                            arguments["min_compress_len"]
+        super(PylibmcBackend, self).__init__(arguments)
+
+
     def _imports(self):
         global pylibmc
         import pylibmc
                         behaviors=self.behaviors
                     )
 
-    def get(self, key):
-        value = self._clients.memcached.get(key)
-        if value is None:
-            return NO_VALUE
-        else:
-            return value
+class MemcachedBackend(GenericMemcachedBackend):
+    """A backend using the standard `Python-memcached <http://www.tummy.com/Community/software/python-memcached/>`_
+    library.
+    
+    Example::
+    
+        from dogpile.cache import make_region
 
-    def set(self, key, value):
-        self._clients.memcached.set(
-                                    key, 
-                                    value, 
-                                    **self._pylibmc_set_args
-                                )
+        region = make_region().configure(
+            'dogpile.cache.memcached',
+            expiration_time = 3600,
+            arguments = {
+                'url':"127.0.0.1:11211"
+            }
+        )
 
-    def delete(self, key):
-        self._clients.memcached.delete(key)
+    """
+    def _imports(self):
+        global memcache
+        import memcache
+
+    def _create_client(self):
+        return memcache.Client(self.url)
+
+class BMemcachedBackend(GenericMemcachedBackend):
+    """A backend for the 
+    `python-binary-memcached <https://github.com/jaysonsantos/python-binary-memcached>`_ 
+    memcached client.
+
+    This is a pure Python memcached client which
+    includes the ability to authenticate with a memcached
+    server using SASL.
+
+    A typical configuration using username/password::
+
+        from dogpile.cache import make_region
+
+        region = make_region().configure(
+            'dogpile.cache.bmemcached',
+            expiration_time = 3600,
+            arguments = {
+                'url':["127.0.0.1"],
+                'username':'scott',
+                'password':'tiger'
+            }
+        )
+
+    Arguments which can be passed to the ``arguments`` 
+    dictionary include:
+
+    :param username: optional username, will be used for 
+     SASL authentication.
+    :param password: optional password, will be used for
+     SASL authentication.
+
+    """
+    def __init__(self, arguments):
+        self.username = arguments.get('username', None)
+        self.password = arguments.get('password', None)
+        super(BMemcachedBackend, self).__init__(arguments)
+
+    def _imports(self):
+        global bmemcached
+        import bmemcached
+
+        class RepairBMemcachedAPI(bmemcached.Client):
+            """Repairs BMemcached's non-standard method 
+            signatures.
+
+            """
+
+            def add(self, key, value):
+                try:
+                    super(RepairBMemcachedAPI, self).add(key, value)
+                    return True
+                except ValueError:
+                    return False
+
+        self.Client = RepairBMemcachedAPI
+
+    def _create_client(self):
+        return self.Client(self.url, 
+                        username=self.username,
+                        password=self.password
+                    )

dogpile/cache/backends/memory.py

-"""Provides a simple dictionary-based backend."""
+"""
+Memory Backend
+--------------
+
+Provides a simple dictionary-based backend.
+
+"""
 
 from dogpile.cache.api import CacheBackend, NO_VALUE
 

dogpile/cache/plugins/mako_cache.py

-"""Implements dogpile caching for Mako templates.
+"""
+Mako Integration
+----------------
 
-See the section :ref:`mako_plugin` for examples.
+dogpile.cache includes a `Mako <http://www.makotemplates.org>`_ plugin that replaces `Beaker <http://beaker.groovie.org>`_ 
+as the cache backend.
+Setup a Mako template lookup using the "dogpile.cache" cache implementation
+and a region dictionary::
+
+    from dogpile.cache import make_region
+    from mako.lookup import TemplateLookup
+
+    my_regions = {
+        "local":make_region(
+                    "dogpile.cache.dbm", 
+                    expiration_time=360,
+                    arguments={"filename":"file.dbm"}
+                ),
+        "memcached":make_region(
+                    "dogpile.cache.pylibmc", 
+                    expiration_time=3600,
+                    arguments={"url":["127.0.0.1"]}
+                )
+    }
+
+    mako_lookup = TemplateLookup(
+        directories=["/myapp/templates"],
+        cache_impl="dogpile.cache",
+        cache_args={
+            'regions':my_regions
+        }
+    )
+
+To use the above configuration in a template, use the ``cached=True`` argument on any
+Mako tag which accepts it, in conjunction with the name of the desired region
+as the ``cache_region`` argument::
+
+    <%def name="mysection()" cached=True cache_region="memcached">
+        some content that's cached
+    </%def>
+
 
 """
 from mako.cache import CacheImpl

dogpile/cache/region.py

     memoized_property
 from dogpile.cache.api import NO_VALUE, CachedValue
 import time
+from functools import wraps
 
 _backend_loader = PluginLoader("dogpile.cache")
 register_backend = _backend_loader.register
             self.key_mangler = self.backend.key_mangler
         return self
 
-    def _create_dogpile(self, identifier):
+    def _create_dogpile(self, identifier, expiration_time):
+        if expiration_time is None:
+            expiration_time = self.expiration_time
         return Dogpile(
-                self.expiration_time, 
+                expiration_time, 
                 lock=self.backend.get_mutex(identifier)
             )
 
         value = self.backend.get(key)
         return value.payload
 
-    def get_or_create(self, key, creator):
+    def get_or_create(self, key, creator, expiration_time=None):
         """Similar to ``get``, will use the given "creation" 
         function to create a new
         value if the value does not exist.
         expiration mechanism to determine when/how 
         the creation function is called.
 
+        :param key: Key to retrieve
+        :param creator: function which creates a new value.
+        :param expiration_time: optional expiration time which will override
+         the expiration time already configured on this :class:`.CacheRegion`
+         if not None.   To set no expiration, use the value -1.
+
         """
         if self.key_mangler:
             key = self.key_mangler(key)
             self.backend.set(key, value)
             return value.payload, value.metadata["creation_time"]
 
-        dogpile = self.dogpile_registry.get(key)
+        dogpile = self.dogpile_registry.get(key, expiration_time)
         with dogpile.acquire(gen_value, 
                     value_and_created_fn=get_value) as value:
             return value
 
         self.backend.delete(key)
 
-    def cache_on_arguments(self, fn):
+    def cache_on_arguments(self, namespace=None, expiration_time=None):
         """A function decorator that will cache the return 
         value of the function using a key derived from the 
-        name of the function, its location within the 
-        application (i.e. source filename) as well as the
-        arguments passed to the function.
+        function itself and its arguments.
         
         E.g.::
         
-            @someregion.cache_on_arguments
+            @someregion.cache_on_arguments()
             def generate_something(x, y):
                 return somedatabase.query(x, y)
                 
         
             generate_something.invalidate(5, 6)
 
-        The generation of the key from the function is the big 
-        controversial thing that was a source of user issues with 
-        Beaker. Dogpile provides the latest and greatest algorithm 
-        used by Beaker, but also allows you to use whatever function 
-        you want, by specifying it to using the ``function_key_generator`` 
-        argument to :func:`.make_region` and/or
+        The default key generation will use the name
+        of the function, the module name for the function,
+        the arguments passed, as well as an optional "namespace"
+        parameter in order to generate a cache key.
+        
+        Given a function ``one`` inside the module
+        ``myapp.tools``::
+        
+            @region.cache_on_arguments(namespace="foo")
+            def one(a, b):
+                return a + b
+
+        Above, calling ``one(3, 4)`` will produce a
+        cache key as follows::
+        
+            myapp.tools:one|foo|3 4
+        
+        The key generator will ignore an initial argument
+        of ``self`` or ``cls``, making the decorator suitable
+        (with caveats) for use with instance or class methods.
+        Given the example::
+        
+            class MyClass(object):
+                @region.cache_on_arguments(namespace="foo")
+                def one(self, a, b):
+                    return a + b
+
+        The cache key above for ``MyClass().one(3, 4)`` will 
+        again produce the same cache key of ``myapp.tools:one|foo|3 4`` -
+        the name ``self`` is skipped.
+        
+        The ``namespace`` parameter is optional, and is used
+        normally to disambiguate two functions of the same
+        name within the same module, as can occur when decorating
+        instance or class methods as below::
+            
+            class MyClass(object):
+                @region.cache_on_arguments(namespace='MC')
+                def somemethod(self, x, y):
+                    ""
+
+            class MyOtherClass(object):
+                @region.cache_on_arguments(namespace='MOC')
+                def somemethod(self, x, y):
+                    ""
+                    
+        Above, the ``namespace`` parameter disambiguates
+        between ``somemethod`` on ``MyClass`` and ``MyOtherClass``.
+        Python class declaration mechanics otherwise prevent
+        the decorator from having awareness of the ``MyClass``
+        and ``MyOtherClass`` names, as the function is received
+        by the decorator before it becomes an instance method.
+
+        The function key generation can be entirely replaced
+        on a per-region basis using the ``function_key_generator``
+        argument present on :func:`.make_region` and
         :class:`.CacheRegion`. If defaults to 
         :func:`.function_key_generator`.
 
+        :param namespace: optional string argument which will be
+         established as part of the cache key.   This may be needed
+         to disambiguate functions of the same name within the same
+         source file, such as those
+         associated with classes - note that the decorator itself 
+         can't see the parent class on a function as the class is
+         being declared.
+        :param expiration_time: if not None, will override the normal
+         expiration time.
         """
-        key_generator = self.function_key_generator(fn)
-        def decorate(*arg, **kw):
-            key = key_generator(*arg, **kw)
-            def creator():
-                return fn(*arg, **kw)
-            return self.get_or_create(key, creator)
+        def decorator(fn):
+            key_generator = self.function_key_generator(namespace, fn)
+            @wraps(fn)
+            def decorate(*arg, **kw):
+                key = key_generator(*arg, **kw)
+                def creator():
+                    return fn(*arg, **kw)
+                return self.get_or_create(key, creator, expiration_time)
 
-        def invalidate(*arg, **kw):
-            key = key_generator(*arg, **kw)
-            self.delete(key)
+            def invalidate(*arg, **kw):
+                key = key_generator(*arg, **kw)
+                self.delete(key)
 
-        decorate.invalidate = invalidate
+            decorate.invalidate = invalidate
 
-        return decorate
+            return decorate
+        return decorator
 
 def make_region(*arg, **kw):
     """Instantiate a new :class:`.CacheRegion`.

dogpile/cache/util.py

         self.impls[name] = load
 
 
-def function_key_generator(fn):
+def function_key_generator(namespace, fn):
     """Return a function that generates a string
     key, based on a given function as well as
     arguments to the returned function itself.
     
     """
 
-    kls = None
-    if hasattr(fn, 'im_func'):
-        kls = fn.im_class
-        fn = fn.im_func
-
-    if kls:
-        namespace = '%s.%s' % (kls.__module__, kls.__name__)
+    if namespace is None:
+        namespace = '%s:%s' % (fn.__module__, fn.__name__)
     else:
-        namespace = '%s|%s' % (inspect.getsourcefile(fn), fn.__name__)
+        namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
 
     args = inspect.getargspec(fn)
     has_self = args[0] and args[0][0] in ('self', 'cls')

tests/_fixtures.py

         self._backend_inst = backend_cls(_config_args.get('arguments', {}))
         return self._backend_inst
 
+class _GenericBackendTest(_GenericBackendFixture, TestCase):
     def tearDown(self):
         if self._region_inst:
             self._region_inst.delete("some key")
         elif self._backend_inst:
             self._backend_inst.delete("some_key")
 
-class _GenericBackendTest(_GenericBackendFixture, TestCase):
     def test_backend_get_nothing(self):
         backend = self._backend()
         eq_(backend.get("some_key"), NO_VALUE)

tests/test_decorator.py

+from tests._fixtures import _GenericBackendFixture
+from tests import eq_
+from unittest import TestCase
+import time
+from dogpile.cache import util
+import inspect
+
+class DecoratorTest(_GenericBackendFixture, TestCase):
+    backend = "dogpile.cache.memory"
+
+    def _fixture(self, namespace=None, expiration_time=None):
+        reg = self._region(config_args={"expiration_time":.25})
+
+        counter = [0]
+        @reg.cache_on_arguments(namespace=namespace, 
+                            expiration_time=expiration_time)
+        def go(a, b):
+            counter[0] +=1
+            return counter[0], a, b
+        return go
+
+    def test_decorator(self):
+        go = self._fixture()
+        eq_(go(1, 2), (1, 1, 2))
+        eq_(go(3, 4), (2, 3, 4))
+        eq_(go(1, 2), (1, 1, 2))
+        time.sleep(.3)
+        eq_(go(1, 2), (3, 1, 2))
+
+    def test_decorator_namespace(self):
+        # TODO: test the namespace actually
+        # working somehow...
+        go = self._fixture(namespace="x")
+        eq_(go(1, 2), (1, 1, 2))
+        eq_(go(3, 4), (2, 3, 4))
+        eq_(go(1, 2), (1, 1, 2))
+        time.sleep(.3)
+        eq_(go(1, 2), (3, 1, 2))
+
+    def test_decorator_custom_expire(self):
+        go = self._fixture(expiration_time=.5)
+        eq_(go(1, 2), (1, 1, 2))
+        eq_(go(3, 4), (2, 3, 4))
+        eq_(go(1, 2), (1, 1, 2))
+        time.sleep(.3)
+        eq_(go(1, 2), (1, 1, 2))
+        time.sleep(.3)
+        eq_(go(1, 2), (3, 1, 2))
+
+    def test_explicit_expire(self):
+        go = self._fixture(expiration_time=1)
+        eq_(go(1, 2), (1, 1, 2))
+        eq_(go(3, 4), (2, 3, 4))
+        eq_(go(1, 2), (1, 1, 2))
+        go.invalidate(1, 2)
+        eq_(go(1, 2), (3, 1, 2))
+
+class KeyGenerationTest(TestCase):
+    def _keygen_decorator(self, namespace=None):
+        canary = []
+        def decorate(fn):
+            canary.append(util.function_key_generator(namespace, fn))
+            return fn
+        return decorate, canary
+
+    def test_keygen_fn(self):
+        decorate, canary = self._keygen_decorator()
+
+        @decorate
+        def one(a, b):
+            pass
+        gen = canary[0]
+
+        eq_(gen(1, 2), "tests.test_decorator:one|1 2")
+        eq_(gen(None, 5), "tests.test_decorator:one|None 5")
+
+    def test_keygen_fn_namespace(self):
+        decorate, canary = self._keygen_decorator("mynamespace")
+
+        @decorate
+        def one(a, b):
+            pass
+        gen = canary[0]
+
+        eq_(gen(1, 2), "tests.test_decorator:one|mynamespace|1 2")
+        eq_(gen(None, 5), "tests.test_decorator:one|mynamespace|None 5")
+
+

tests/test_memcached_backend.py

+from tests._fixtures import _GenericBackendTest, _GenericMutexTest
+from tests import eq_
+from unittest import TestCase
+from threading import Thread
+import time
+
+class _NonDistributedMemcachedTest(_GenericBackendTest):
+    region_args = {
+        "key_mangler":lambda x: x.replace(" ", "_")
+    }
+    config_args = {
+        "arguments":{
+            "url":"127.0.0.1:11211"
+        }
+    }
+
+class _DistributedMemcachedTest(_GenericBackendTest):
+    region_args = {
+        "key_mangler":lambda x: x.replace(" ", "_")
+    }
+    config_args = {
+        "arguments":{
+            "url":"127.0.0.1:11211",
+            "distributed_lock":True
+        }
+    }
+
+class _DistributedMemcachedMutexTest(_GenericMutexTest):
+    config_args = {
+        "arguments":{
+            "url":"127.0.0.1:11211",
+            "distributed_lock":True
+        }
+    }
+
+class PylibmcTest(_NonDistributedMemcachedTest):
+    backend = "dogpile.cache.pylibmc"
+
+class PylibmcDistributedTest(_DistributedMemcachedTest):
+    backend = "dogpile.cache.pylibmc"
+
+class PylibmcDistributedMutexTest(_DistributedMemcachedMutexTest):
+    backend = "dogpile.cache.pylibmc"
+
+class BMemcachedTest(_NonDistributedMemcachedTest):
+    backend = "dogpile.cache.bmemcached"
+
+class BMemcachedDistributedTest(_DistributedMemcachedTest):
+    backend = "dogpile.cache.bmemcached"
+
+class BMemcachedDistributedMutexTest(_DistributedMemcachedMutexTest):
+    backend = "dogpile.cache.bmemcached"
+
+class MemcachedTest(_NonDistributedMemcachedTest):
+    backend = "dogpile.cache.memcached"
+
+class MemcachedDistributedTest(_DistributedMemcachedTest):
+    backend = "dogpile.cache.memcached"
+
+class MemcachedDistributedMutexTest(_DistributedMemcachedMutexTest):
+    backend = "dogpile.cache.memcached"
+
+
+from dogpile.cache.backends.memcached import GenericMemcachedBackend
+from dogpile.cache.backends.memcached import PylibmcBackend
+class MockMemcachedBackend(GenericMemcachedBackend):
+    def _imports(self):
+        pass
+
+    def _create_client(self):
+        return MockClient(self.url)
+
+class MockPylibmcBackend(PylibmcBackend):
+    def _imports(self):
+        pass
+
+    def _create_client(self):
+        return MockClient(self.url, 
+                        binary=self.binary,
+                        behaviors=self.behaviors
+                    )
+
+class MockClient(object):
+    number_of_clients = 0
+
+    def __init__(self, *arg, **kw):
+        self.arg = arg
+        self.kw = kw
+        self.canary = []
+        self._cache = {}
+        MockClient.number_of_clients += 1
+
+    def get(self, key):
+        return self._cache.get(key)
+    def set(self, key, value, **kw):
+        self.canary.append(kw)
+        self._cache[key] = value
+    def delete(self, key):
+        self._cache.pop(key, None)
+    def __del__(self):
+        MockClient.number_of_clients -= 1
+
+class PylibmcArgsTest(TestCase):
+    """Assert that PylibmcBackend translates its ``arguments`` dict
+    into the constructor args / ``set()`` kwargs of the underlying
+    client, using MockPylibmcBackend + MockClient so no server is
+    required.
+    """
+    def test_binary_flag(self):
+        # "binary" is forwarded as a constructor keyword
+        backend = MockPylibmcBackend(arguments={'url':'foo','binary':True})
+        eq_(backend._create_client().kw["binary"], True)
+
+    def test_url_list(self):
+        # a list url is passed through unchanged as the first
+        # positional argument
+        backend = MockPylibmcBackend(arguments={'url':["a", "b", "c"]})
+        eq_(backend._create_client().arg[0], ["a", "b", "c"])
+
+    def test_url_scalar(self):
+        # a scalar url is normalized to a one-element list
+        backend = MockPylibmcBackend(arguments={'url':"foo"})
+        eq_(backend._create_client().arg[0], ["foo"])
+
+    def test_behaviors(self):
+        # "behaviors" dict is forwarded as a constructor keyword
+        backend = MockPylibmcBackend(arguments={'url':"foo", 
+                                    "behaviors":{"q":"p"}})
+        eq_(backend._create_client().kw["behaviors"], {"q": "p"})
+
+    def test_set_time(self):
+        # "memcached_expire_time" becomes the client's "time" kwarg
+        backend = MockPylibmcBackend(arguments={'url':"foo", 
+                                "memcached_expire_time":20})
+        backend.set("foo", "bar")
+        eq_(backend._clients.memcached.canary, [{"time":20}])
+
+    def test_set_min_compress_len(self):
+        # "min_compress_len" is forwarded to set() as-is
+        backend = MockPylibmcBackend(arguments={'url':"foo", 
+                                "min_compress_len":20})
+        backend.set("foo", "bar")
+        eq_(backend._clients.memcached.canary, [{"min_compress_len":20}])
+
+    def test_no_set_args(self):
+        # with no optional arguments, set() receives no extra kwargs
+        backend = MockPylibmcBackend(arguments={'url':"foo"})
+        backend.set("foo", "bar")
+        eq_(backend._clients.memcached.canary, [{}])
+
+class LocalThreadTest(TestCase):
+    """Verify clients are created per-thread and garbage collected
+    once their owning threads exit, using MockClient's live-instance
+    counter.
+    """
+    def setUp(self):
+        # force collection of clients left over from prior tests so
+        # the counter starts from a known baseline
+        import gc
+        gc.collect()
+        eq_(MockClient.number_of_clients, 0)
+
+    def test_client_cleanup_1(self):
+        self._test_client_cleanup(1)
+
+    def test_client_cleanup_3(self):
+        self._test_client_cleanup(3)
+
+    def test_client_cleanup_10(self):
+        self._test_client_cleanup(10)
+
+    def _test_client_cleanup(self, count):
+        backend = MockMemcachedBackend(arguments={'url':'foo'})
+        canary = []
+
+        def f():
+            # touching ._clients.memcached lazily creates this
+            # thread's client
+            backend._clients.memcached
+            canary.append(MockClient.number_of_clients)
+            # keep the thread (and its client) alive briefly so the
+            # counts observed by later threads keep increasing
+            time.sleep(.05)
+
+        threads = [Thread(target=f) for i in xrange(count)]
+        for t in threads:
+            t.start()
+        for t in threads:
+            t.join()
+        # NOTE(review): expects thread i to observe i + 2 live clients;
+        # the "+2" offset is not derivable from this file -- presumably
+        # one client belongs to the main thread.  Confirm against
+        # GenericMemcachedBackend._clients.  Also, the per-thread
+        # counts being strictly sequential depends on thread start
+        # ordering plus the sleep above, which looks timing-sensitive.
+        eq_(canary, [i + 2 for i in xrange(count)])
+        # after join + thread teardown only one client should survive
+        eq_(MockClient.number_of_clients, 1)
+
+

tests/test_pylibmc_backend.py

-from tests._fixtures import _GenericBackendTest, _GenericMutexTest
-from tests import eq_
-from unittest import TestCase
-from threading import Thread
-import time
-
-class PylibmcTest(_GenericBackendTest):
-    backend = "dogpile.cache.pylibmc"
-
-    region_args = {
-        "key_mangler":lambda x: x.replace(" ", "_")
-    }
-    config_args = {
-        "arguments":{
-            "url":"127.0.0.1"
-        }
-    }
-
-class PylibmcDistributedTest(_GenericBackendTest):
-    backend = "dogpile.cache.pylibmc"
-
-    region_args = {
-        "key_mangler":lambda x: x.replace(" ", "_")
-    }
-    config_args = {
-        "arguments":{
-            "url":"127.0.0.1",
-            "distributed_lock":True
-        }
-    }
-
-class PylibmcDistributedMutexTest(_GenericMutexTest):
-    backend = "dogpile.cache.pylibmc"
-
-    config_args = {
-        "arguments":{
-            "url":"127.0.0.1",
-            "distributed_lock":True
-        }
-    }
-
-from dogpile.cache.backends.memcached import PylibmcBackend
-class MockPylibmcBackend(PylibmcBackend):
-    def _imports(self):
-        pass
-
-    def _create_client(self):
-        return MockClient(self.url, 
-                        binary=self.binary,
-                        behaviors=self.behaviors
-                    )
-
-class MockClient(object):
-    number_of_clients = 0
-
-    def __init__(self, *arg, **kw):
-        self.arg = arg
-        self.kw = kw
-        self.canary = []
-        self._cache = {}
-        MockClient.number_of_clients += 1
-
-    def get(self, key):
-        return self._cache.get(key)
-    def set(self, key, value, **kw):
-        self.canary.append(kw)
-        self._cache[key] = value
-    def delete(self, key):
-        self._cache.pop(key, None)
-    def __del__(self):
-        MockClient.number_of_clients -= 1
-
-class PylibmcArgsTest(TestCase):
-    def test_binary_flag(self):
-        backend = MockPylibmcBackend(arguments={'url':'foo','binary':True})
-        eq_(backend._create_client().kw["binary"], True)
-
-    def test_url_list(self):
-        backend = MockPylibmcBackend(arguments={'url':["a", "b", "c"]})
-        eq_(backend._create_client().arg[0], ["a", "b", "c"])
-
-    def test_url_scalar(self):
-        backend = MockPylibmcBackend(arguments={'url':"foo"})
-        eq_(backend._create_client().arg[0], ["foo"])
-
-    def test_behaviors(self):
-        backend = MockPylibmcBackend(arguments={'url':"foo", 
-                                    "behaviors":{"q":"p"}})
-        eq_(backend._create_client().kw["behaviors"], {"q": "p"})
-
-    def test_set_time(self):
-        backend = MockPylibmcBackend(arguments={'url':"foo", 
-                                "memcached_expire_time":20})
-        backend.set("foo", "bar")
-        eq_(backend._clients.memcached.canary, [{"time":20}])
-
-    def test_set_min_compress_len(self):
-        backend = MockPylibmcBackend(arguments={'url':"foo", 
-                                "min_compress_len":20})
-        backend.set("foo", "bar")
-        eq_(backend._clients.memcached.canary, [{"min_compress_len":20}])
-
-    def test_no_set_args(self):
-        backend = MockPylibmcBackend(arguments={'url':"foo"})
-        backend.set("foo", "bar")
-        eq_(backend._clients.memcached.canary, [{}])
-
-class PylibmcThreadTest(TestCase):
-    def setUp(self):
-        import gc
-        gc.collect()
-        eq_(MockClient.number_of_clients, 0)
-
-    def test_client_cleanup_1(self):
-        self._test_client_cleanup(1)
-
-    def test_client_cleanup_3(self):
-        self._test_client_cleanup(3)
-
-    def test_client_cleanup_10(self):
-        self._test_client_cleanup(10)
-
-    def _test_client_cleanup(self, count):
-        backend = MockPylibmcBackend(arguments={'url':'foo','binary':True})
-        canary = []
-
-        def f():
-            backend._clients.memcached
-            canary.append(MockClient.number_of_clients)
-            time.sleep(.05)
-
-        threads = [Thread(target=f) for i in xrange(count)]
-        for t in threads:
-            t.start()
-        for t in threads:
-            t.join()
-        eq_(canary, [i + 2 for i in xrange(count)])
-        eq_(MockClient.number_of_clients, 1)
-
-

tests/test_region.py

         eq_(reg.get_or_create("some key", creator), "some value 2")
         eq_(reg.get("some key"), "some value 2")
 
+    def test_expire_override(self):
+        # A per-call expiration_time passed to get_or_create() should
+        # override the region-wide default of 5 seconds.
+        reg = self._region(config_args={"expiration_time":5})
+        counter = itertools.count(1)
+        def creator():
+            return "some value %d" % next(counter)
+        # first call creates the value under the 1-second override
+        eq_(reg.get_or_create("some key", creator, expiration_time=1), 
+                    "some value 1")
+        time.sleep(1)
+        # NOTE(review): plain get() still returns the stale value --
+        # presumably get() does not apply expiration; confirm against
+        # CacheRegion.get.
+        eq_(reg.get("some key"), "some value 1")
+        # get_or_create() honors the 1-second override and regenerates
+        eq_(reg.get_or_create("some key", creator, expiration_time=1), 
+                    "some value 2")
+        eq_(reg.get("some key"), "some value 2")
+
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.