Mike Bayer committed 8a6571a

- implement pylibmc backend
- other updates


Files changed (13)

 \.coverage
 \.DS_Store
 test.cfg
+.venv
+0.1.0
+=====
+Initial release.
+
+Includes a pylibmc backend and a plain dictionary backend.

docs/build/api.rst

 Backend API
 =============
 
+See the section :ref:`creating_backends` for details on how to
+register new backends.
+
 .. automodule:: dogpile.cache.api
     :members:
 
 Backends
 ==========
 
-.. automodule:: dogpile.cache.backends.dbm
+Memory Backend
+--------------
+.. automodule:: dogpile.cache.backends.memory
     :members:
 
+Pylibmc Backend
+---------------
+
 .. automodule:: dogpile.cache.backends.memcached
     :members:
 
 Plugins
 ========
 
+Mako Plugin
+-----------
+
 .. automodule:: dogpile.cache.plugins.mako_cache
     :members:

docs/build/usage.rst

 .. automethod:: dogpile.cache.region.CacheRegion.cache_on_arguments
     :noindex:
 
-Backends
-========
+.. _creating_backends:
+
+Creating Backends
+=================
 
 Backends are located using the setuptools entrypoint system.  To make life easier
 for writers of ad-hoc backends, a helper function is included which registers any
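
As a hedged sketch of the registration helper referenced here (``register_backend`` is the name imported by the test suite below; the three-argument signature and all ``mypackage`` names are assumptions for illustration)::

    from dogpile.cache import make_region, register_backend

    # assumed signature: register_backend(name, modulepath, objname);
    # "mypackage.cache_backend" and MyBackend are hypothetical
    register_backend("mypackage.custom", "mypackage.cache_backend", "MyBackend")

    region = make_region().configure("mypackage.custom")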

dogpile/cache/api.py

     def __new__(cls, payload, metadata):
         return tuple.__new__(cls, (payload, metadata))
 
+    def __reduce__(self):
+        return CachedValue, (self.payload, self.metadata)
+
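
The ``__reduce__`` addition above makes ``CachedValue`` picklable; a tuple subclass whose ``__new__`` takes extra arguments cannot otherwise be reconstructed by pickle. A minimal sketch (the metadata contents are illustrative)::

    import pickle
    from dogpile.cache.api import CachedValue

    value = CachedValue("some payload", {"ct": 1234567890, "v": 1})
    # without __reduce__, this round trip would fail because __new__
    # requires the payload and metadata arguments
    restored = pickle.loads(pickle.dumps(value))
    assert restored == value and restored.payload == "some payload"
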
 class CacheBackend(object):
     """Base class for backend implementations."""
 

dogpile/cache/backends/memcached.py

+"""Provides backends for talking to memcached."""
+
 from dogpile.cache.api import CacheBackend, CachedValue, NO_VALUE
+from dogpile.cache import util
 
 class PylibmcBackend(CacheBackend):
-    pass
+    """A backend for the `pylibmc <http://sendapatch.se/projects/pylibmc/index.html>`_ 
+    memcached client.
+    
+    E.g.::
+    
+        from dogpile.cache import make_region
+
+        region = make_region().configure(
+            'dogpile.cache.pylibmc',
+            expiration_time = 3600,
+            arguments = {
+                'url':["127.0.0.1"],
+                'binary':True,
+                'behaviors':{"tcp_nodelay": True,"ketama":True}
+            }
+        )
+    
+    Arguments which can be passed to the ``arguments`` 
+    dictionary include:
+    
+    :param url: the string URL to connect to.  Can be a single
+     string or a list of strings.
+    :param binary: sets the ``binary`` flag understood by
+     ``pylibmc.Client``.
+    :param behaviors: a dictionary which will be passed to
+     ``pylibmc.Client`` as the ``behaviors`` parameter.
+    :param memcached_expire_time: integer, when present will
+     be passed as the ``time`` parameter to ``pylibmc.Client.set``.
+     This is used to set the memcached expiry time for a value.
+     
+     Note that this is **different** from Dogpile's own
+     ``expiration_time``, which is the number of seconds after
+     which Dogpile will consider the value to be expired; however,
+     when using :meth:`.CacheRegion.get_or_create`, Dogpile
+     **will continue to use this value** until a new one
+     can be generated.
+     Therefore, if you are setting ``memcached_expire_time``, you'll
+     usually want to make sure it is greater than ``expiration_time`` 
+     by at least enough seconds for new values to be generated.
+    :param min_compress_len: integer, will be passed as the
+     ``min_compress_len`` parameter to the ``pylibmc.Client.set``
+     method.
+     
+    Threading
+    ---------
+    
+    The :class:`.PylibmcBackend` uses a ``threading.local()``
+    object to store individual ``pylibmc.Client`` objects per thread.
+    ``threading.local()`` has the advantage over pylibmc's built-in
+    thread pool in that it automatically discards objects associated
+    with a particular thread when that thread ends.
+    
+    """
+
+    def __init__(self, arguments):
+        self._imports()
+        self.url = util.to_list(arguments['url'])
+        self.binary = arguments.get('binary', False)
+        self.behaviors = arguments.get('behaviors', {})
+        self.memcached_expire_time = arguments.get(
+                                        'memcached_expire_time', 0)
+        self.min_compress_len = arguments.get('min_compress_len', 0)
+
+        self._pylibmc_set_args = {}
+        if "memcached_expire_time" in arguments:
+            self._pylibmc_set_args["time"] = \
+                            arguments["memcached_expire_time"]
+        if "min_compress_len" in arguments:
+            self._pylibmc_set_args["min_compress_len"] = \
+                            arguments["min_compress_len"]
+        backend = self
+
+        # using a plain threading.local here.   threading.local
+        # automatically deletes the __dict__ when a thread ends,
+        # so the idea is that this is superior to pylibmc's
+        # own ThreadMappedPool which doesn't handle this 
+        # automatically.
+        class ClientPool(util.threading.local):
+            def __init__(self):
+                self.memcached = backend._create_client()
+
+        self._clients = ClientPool()
+
+    def _imports(self):
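+        # deferred import: the module stays importable (and registerable as a
+        # backend) without pylibmc installed; pylibmc is only required once a
+        # backend instance is actually constructed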
+        global pylibmc
+        import pylibmc
+
+    def _create_client(self):
+        return pylibmc.Client(self.url, 
+                        binary=self.binary,
+                        behaviors=self.behaviors
+                    )
+
+    def get(self, key):
+        value = self._clients.memcached.get(key)
+        if value is None:
+            return NO_VALUE
+        else:
+            return value
+
+    def set(self, key, value):
+        self._clients.memcached.set(
+                                    key, 
+                                    value, 
+                                    **self._pylibmc_set_args
+                                )
+
+    def delete(self, key):
+        self._clients.memcached.delete(key)
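
To illustrate the ``memcached_expire_time`` guidance in the docstring above, a region can be configured so that the memcached expiry comfortably outlives dogpile's own ``expiration_time``; the numbers below are only illustrative::

    from dogpile.cache import make_region

    # dogpile regards values as stale after 10 minutes, while memcached keeps
    # them for 15 minutes, so a stale value remains available to serve while
    # get_or_create generates a replacement
    region = make_region().configure(
        'dogpile.cache.pylibmc',
        expiration_time=600,
        arguments={
            'url': ["127.0.0.1"],
            'memcached_expire_time': 900,
        }
    )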

dogpile/cache/backends/memory.py

+"""Provides a simple dictionary-based backend."""
+
 from dogpile.cache.api import CacheBackend, CachedValue, NO_VALUE
 
 class MemoryBackend(CacheBackend):
-    def __init__(self, the_cache=None):
-        if the_cache is None:
-            self._cache = {}
-        else:
-            self._cache = the_cache
+    """A backend that uses a plain dictionary.
+
+    There is no size management; values placed
+    into the dictionary remain until explicitly
+    removed.  Note that Dogpile's expiration of
+    items is based on timestamps and does not
+    remove expired items from the cache.
+
+    E.g.::
+    
+        from dogpile.cache import make_region
+
+        region = make_region().configure(
+            'dogpile.cache.memory'
+        )
+        
+    
+    To use a Python dictionary of your choosing,
+    it can be passed in with the ``cache_dict``
+    argument::
+    
+        my_dictionary = {}
+        region = make_region().configure(
+            'dogpile.cache.memory',
+            arguments={
+                "cache_dict":my_dictionary
+            }
+        )
+    
+    
+    """
+    def __init__(self, arguments):
+        self._cache = arguments.pop("cache_dict", {})
 
     def get(self, key):
         return self._cache.get(key, NO_VALUE)

dogpile/cache/plugins/mako_cache.py

+"""Implements dogpile caching for Mako templates.
+
+See the section :ref:`mako_plugin` for examples.
+
+"""
 from mako.cache import CacheImpl
 
 class MakoPlugin(CacheImpl):
+    """A Mako ``CacheImpl`` which talks to dogpile.cache."""
+
     def __init__(self, cache):
         super(MakoPlugin, self).__init__(cache)
         try:

dogpile/cache/region.py

                 _config_prefix
             )
         else:
-            self.backend = backend_cls(arguments)
+            self.backend = backend_cls(arguments or {})
         self.expiration_time = expiration_time
         self.dogpile_registry = NameRegistry(self._create_dogpile)
         if self.key_mangler is None:

dogpile/cache/util.py

 from hashlib import sha1
 import inspect
 
+try:
+    import threading
+    import thread
+except ImportError:
+    import dummy_threading as threading
+    import dummy_thread as thread
+
 class PluginLoader(object):
     def __init__(self, group):
         self.group = group
             return self
         obj.__dict__[self.__name__] = result = self.fget(obj)
         return result
+
+def to_list(x, default=None):
+    """Coerce to a list."""
+    if x is None:
+        return default
+    if not isinstance(x, (list, tuple)):
+        return [x]
+    else:
+        return x

tests/_fixtures.py

+from dogpile.cache.api import CacheBackend, CachedValue, NO_VALUE
+from dogpile.cache import register_backend, CacheRegion
+from tests import eq_, assert_raises_message
+import itertools
+import time
+from nose import SkipTest
+
+from unittest import TestCase
+
+class _GenericBackendTest(TestCase):
+    @classmethod
+    def setup_class(cls):
+        try:
+            cls._region()
+        except ImportError:
+            raise SkipTest("Backend %s not installed" % cls.backend)
+
+    backend = None
+    region_args = {}
+    config_args = {}
+
+    @classmethod
+    def _region(cls, region_args={}, config_args={}):
+        _region_args = cls.region_args.copy()
+        _region_args.update(region_args)
+        reg = CacheRegion(**_region_args)
+        _config_args = cls.config_args.copy()
+        _config_args.update(config_args)
+        reg.configure(cls.backend, **_config_args)
+        return reg
+
+    def test_set_get_value(self):
+        reg = self._region()
+        reg.set("some key", "some value")
+        eq_(reg.get("some key"), "some value")
+
+    def test_set_get_nothing(self):
+        reg = self._region()
+        eq_(reg.get("some key"), NO_VALUE)
+
+    def test_creator(self):
+        reg = self._region()
+        def creator():
+            return "some value"
+        eq_(reg.get_or_create("some key", creator), "some value")
+
+    def test_remove(self):
+        reg = self._region()
+        reg.set("some key", "some value")
+        reg.delete("some key")
+        reg.delete("some key")
+        eq_(reg.get("some key"), NO_VALUE)
+
+    def test_expire(self):
+        reg = self._region(config_args={"expiration_time":1})
+        counter = itertools.count(1)
+        def creator():
+            return "some value %d" % next(counter)
+        eq_(reg.get_or_create("some key", creator), "some value 1")
+        time.sleep(1)
+        eq_(reg.get("some key"), "some value 1")
+        eq_(reg.get_or_create("some key", creator), "some value 2")
+        eq_(reg.get("some key"), "some value 2")

tests/test_memory_backend.py

-from unittest import TestCase
-from dogpile.cache.api import CacheBackend, CachedValue, NO_VALUE
-from dogpile.cache import register_backend, CacheRegion
-from tests import eq_, assert_raises_message
-import time
-import itertools
+from tests._fixtures import _GenericBackendTest
 
-class MemoryBackendTest(TestCase):
+class MemoryBackendTest(_GenericBackendTest):
+    backend = "dogpile.cache.memory"
 
-    def _region(self, init_args={}, config_args={}, backend="dogpile.cache.memory"):
-        reg = CacheRegion(**init_args)
-        reg.configure(backend, **config_args)
-        return reg
-
-    def test_set_get_value(self):
-        reg = self._region()
-        reg.set("some key", "some value")
-        eq_(reg.get("some key"), "some value")
-
-    def test_set_get_nothing(self):
-        reg = self._region()
-        eq_(reg.get("some key"), NO_VALUE)
-
-    def test_creator(self):
-        reg = self._region()
-        def creator():
-            return "some value"
-        eq_(reg.get_or_create("some key", creator), "some value")
-
-    def test_remove(self):
-        reg = self._region()
-        reg.set("some key", "some value")
-        reg.delete("some key")
-        reg.delete("some key")
-        eq_(reg.get("some key"), NO_VALUE)
-
-    def test_expire(self):
-        reg = self._region(config_args={"expiration_time":1})
-        counter = itertools.count(1)
-        def creator():
-            return "some value %d" % next(counter)
-        eq_(reg.get_or_create("some key", creator), "some value 1")
-        time.sleep(1)
-        eq_(reg.get("some key"), "some value 1")
-        eq_(reg.get_or_create("some key", creator), "some value 2")
-        eq_(reg.get("some key"), "some value 2")
-

tests/test_pylibmc_backend.py

+from tests._fixtures import _GenericBackendTest
+from tests import eq_
+from unittest import TestCase
+from threading import Thread
+import time
+
+class PyLibMCBackendTest(_GenericBackendTest):
+    backend = "dogpile.cache.pylibmc"
+
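+    # memcached keys may not contain spaces, so mangle the keys used by
+    # the generic tests (e.g. "some key") into a safe form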
+    region_args = {
+        "key_mangler":lambda x: x.replace(" ", "_")
+    }
+    config_args = {
+        "arguments":{
+            "url":"127.0.0.1:11211"
+        }
+    }
+
+from dogpile.cache.backends.memcached import PylibmcBackend
+class MockPylibmcBackend(PylibmcBackend):
+    def _imports(self):
+        pass
+
+    def _create_client(self):
+        return MockClient(self.url, 
+                        binary=self.binary,
+                        behaviors=self.behaviors
+                    )
+
+class MockClient(object):
+    number_of_clients = 0
+
+    def __init__(self, *arg, **kw):
+        self.arg = arg
+        self.kw = kw
+        self.canary = []
+        self._cache = {}
+        MockClient.number_of_clients += 1
+
+    def get(self, key):
+        return self._cache.get(key)
+    def set(self, key, value, **kw):
+        self.canary.append(kw)
+        self._cache[key] = value
+    def delete(self, key):
+        self._cache.pop(key, None)
+    def __del__(self):
+        MockClient.number_of_clients -= 1
+
+class PylibmcArgsTest(TestCase):
+    def test_binary_flag(self):
+        backend = MockPylibmcBackend(arguments={'url':'foo','binary':True})
+        eq_(backend._create_client().kw["binary"], True)
+
+    def test_url_list(self):
+        backend = MockPylibmcBackend(arguments={'url':["a", "b", "c"]})
+        eq_(backend._create_client().arg[0], ["a", "b", "c"])
+
+    def test_url_scalar(self):
+        backend = MockPylibmcBackend(arguments={'url':"foo"})
+        eq_(backend._create_client().arg[0], ["foo"])
+
+    def test_behaviors(self):
+        backend = MockPylibmcBackend(arguments={'url':"foo", 
+                                    "behaviors":{"q":"p"}})
+        eq_(backend._create_client().kw["behaviors"], {"q": "p"})
+
+    def test_set_time(self):
+        backend = MockPylibmcBackend(arguments={'url':"foo", 
+                                "memcached_expire_time":20})
+        backend.set("foo", "bar")
+        eq_(backend._clients.memcached.canary, [{"time":20}])
+
+    def test_set_min_compress_len(self):
+        backend = MockPylibmcBackend(arguments={'url':"foo", 
+                                "min_compress_len":20})
+        backend.set("foo", "bar")
+        eq_(backend._clients.memcached.canary, [{"min_compress_len":20}])
+
+    def test_no_set_args(self):
+        backend = MockPylibmcBackend(arguments={'url':"foo"})
+        backend.set("foo", "bar")
+        eq_(backend._clients.memcached.canary, [{}])
+
+class PylibmcThreadTest(TestCase):
+    def setUp(self):
+        import gc
+        gc.collect()
+        eq_(MockClient.number_of_clients, 0)
+
+    def test_client_cleanup_1(self):
+        self._test_client_cleanup(1)
+
+    def test_client_cleanup_3(self):
+        self._test_client_cleanup(3)
+
+    def test_client_cleanup_10(self):
+        self._test_client_cleanup(10)
+
+    def _test_client_cleanup(self, count):
+        backend = MockPylibmcBackend(arguments={'url':'foo','binary':True})
+        canary = []
+
+        def f():
+            backend._clients.memcached
+            canary.append(MockClient.number_of_clients)
+            time.sleep(.05)
+
+        threads = [Thread(target=f) for i in xrange(count)]
+        for t in threads:
+            t.start()
+        for t in threads:
+            t.join()
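+        # one MockClient is created by __init__ in the main thread; each worker
+        # thread's threading.local adds one more while it runs, and once the
+        # threads have ended only the main-thread client remains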
+        eq_(canary, [i + 2 for i in xrange(count)])
+        eq_(MockClient.number_of_clients, 1)
+
+