Grigoriy Petukhov committed 82590a1

Initial commit


Files changed (6)

.hgignore

+syntax: glob
+*.pyc
+*.swp
+*.egg-info/
+dist/
+update_pypi.sh
+Copyright (c) 2008-2010, Grigoriy Petukhov
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+    * Neither the name of the PyBB nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

MANIFEST.in

+recursive-include mongodb_cache *
+global-exclude *.pyc
+================================
+Django cache backend for MongoDB
+================================
+
+Documentation
+=============
+ * http://docs.djangoproject.com/en/dev/topics/cache/
+ * http://www.mongodb.org/display/DOCS/Home
+ * http://api.mongodb.org/python/1.6/index.html
+
+
+Usage
+=====
+
+Set up your settings.py with the following line:
+
+    CACHE_BACKEND = 'mongodb_cache://localhost?database=foobar'
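
Judging from the backend code below in mongodb_cache/__init__.py, the URI also accepts an explicit port plus optional timeout and debug query parameters, so a fuller setting could look like:

    CACHE_BACKEND = 'mongodb_cache://localhost:27017?database=foobar&timeout=600'

Once configured, the backend is driven through Django's standard cache API. A minimal sketch (keys and values here are purely illustrative):

    from django.core.cache import cache

    cache.set('greeting', 'hello', 60)    # store for 60 seconds
    cache.get('greeting')                 # -> u'hello' (strings come back as unicode)
    cache.add('greeting', 'ignored')      # returns False: the key already exists
    cache.delete('greeting')              # fails silently if the key is gone
    cache.get('greeting', 'missing')      # -> 'missing' (default for absent keys)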

mongodb_cache/__init__.py

+"Base Cache class."
+import pymongo
+from datetime import datetime, timedelta
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+import logging
+
+from django.core.exceptions import ImproperlyConfigured
+from django.utils.encoding import smart_unicode, smart_str
+
+logger = logging.getLogger('mongodb_cache')
+logger.addHandler(logging.StreamHandler())
+logger.setLevel(logging.INFO)
+
+class InvalidCacheBackendError(ImproperlyConfigured):
+    pass
+
+class CacheClass(object):
+    def __init__(self, server, params):
+        timeout = params.get('timeout', 300)
+        try:
+            timeout = int(timeout)
+        except (ValueError, TypeError):
+            timeout = 300
+        self.default_timeout = timeout
+        if ':' in server:
+            server, port = server.split(':')
+            port = int(port)
+        else:
+            port = 27017
+        try:
+            database_name = params['database']
+        except KeyError:
+            raise InvalidCacheBackendError('database argument is required')
+        self.debug = 'debug' in params
+        self._cache = pymongo.Connection(server, port)[database_name].cache
+        self._cache.ensure_index('key', unique=True)
+        self._cache.ensure_index('created')
+
+    def expired(self, timeout):
+        return datetime.now() + timedelta(seconds=timeout or self.default_timeout)
+
+    def add(self, key, value, timeout=None):
+        """
+        Set a value in the cache if the key does not already exist. If
+        timeout is given, that timeout will be used for the key; otherwise
+        the default cache timeout will be used.
+
+        Returns True if the value was stored, False otherwise.
+        """
+
+        key = smart_str(key)
+        value = pickle.dumps(value)
+        try:
+            obj = {'key': key, 'value': value, 'expired': self.expired(timeout)}
+            self._cache.save(obj, safe=True)
+        except pymongo.errors.OperationFailure:
+            return False
+        else:
+            return True
+
+    def get(self, key, default=None):
+        """
+        Fetch a given key from the cache. If the key does not exist, return
+        default, which itself defaults to None.
+        """
+
+        key = smart_str(key)
+        obj = {'key': key}
+        obj = self._cache.find_one(obj)
+        if obj is None:
+            return default
+        else:
+            if obj['expired'] < datetime.now():
+                self._cache.remove({'key': obj['key']})
+                return default
+            else:
+                if self.debug:
+                    logger.info('Cache hit for key: %s' % key)
+                value = pickle.loads(obj['value'].encode('utf-8'))
+                if isinstance(value, basestring):
+                    return smart_unicode(value)
+                else:
+                    return value
+
+    def set(self, key, value, timeout=None):
+        """
+        Set a value in the cache. If timeout is given, that timeout will be
+        used for the key; otherwise the default cache timeout will be used.
+        """
+
+        key = smart_str(key)
+        if not self.add(key, value, timeout):
+            value = pickle.dumps(value)
+            obj = {'key': key, 'value': value, 'expired': self.expired(timeout)}
+            self._cache.update({'key': key}, obj)
+
+    def delete(self, key):
+        """
+        Delete a key from the cache, failing silently.
+        """
+
+        key = smart_str(key)
+        self._cache.remove({'key': key})
+
+    def get_many(self, keys):
+        """
+        Fetch a bunch of keys from the cache. For certain backends (memcached,
+        pgsql) this can be *much* faster when fetching multiple values.
+
+        Returns a dict mapping each key in keys to its value. If the given
+        key is missing, it will be missing from the response dict.
+        """
+        d = {}
+        for k in keys:
+            val = self.get(k)
+            if val is not None:
+                d[k] = val
+        return d
+
+    def has_key(self, key):
+        """
+        Returns True if the key is in the cache and has not expired.
+        """
+        return self.get(key) is not None
+
+    def incr(self, key, delta=1):
+        """
+        Add delta to value in the cache. If the key does not exist, raise a
+        ValueError exception.
+        """
+        if key not in self:
+            raise ValueError("Key '%s' not found" % key)
+        new_value = self.get(key) + delta
+        self.set(key, new_value)
+        return new_value
+
+    def decr(self, key, delta=1):
+        """
+        Subtract delta from value in the cache. If the key does not exist, raise
+        a ValueError exception.
+        """
+        return self.incr(key, -delta)
+
+    def __contains__(self, key):
+        """
+        Returns True if the key is in the cache and has not expired.
+        """
+        # This is a separate method, rather than just a copy of has_key(),
+        # so that it always has the same functionality as has_key(), even
+        # if a subclass overrides it.
+        return self.has_key(key)
+
+    def _get_num_entries(self):
+        return self._cache.count()
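
The get_many() above issues one find_one() per key. As its docstring hints, the lookup can be batched; with pymongo the same result could come from a single query using an $in filter. A rough sketch (not part of the commit; get_many_batched is a hypothetical helper reusing the 'key'/'value'/'expired' document layout defined above):

    from datetime import datetime
    try:
        import cPickle as pickle
    except ImportError:
        import pickle

    def get_many_batched(backend, keys):
        """Fetch several cache keys with one MongoDB query.

        `backend` is a CacheClass instance; keys are assumed to already be
        plain byte strings, as produced by smart_str() in the class above.
        """
        now = datetime.now()
        result = {}
        for doc in backend._cache.find({'key': {'$in': list(keys)}}):
            if doc['expired'] < now:
                continue  # skip expired entries; get() removes them lazily
            # mirror get(): values are stored as pickled strings
            result[doc['key']] = pickle.loads(doc['value'].encode('utf-8'))
        return result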

setup.py

+import os
+from setuptools import setup
+
+# Compile the list of packages available, because distutils doesn't have
+# an easy way to do this.
+
+packages, data_files = [], []
+root_dir = os.path.dirname(__file__)
+if root_dir:
+    os.chdir(root_dir)
+
+PACKAGE = 'mongodb_cache'
+
+for dirpath, dirnames, filenames in os.walk(PACKAGE):
+    dirnames[:] = [d for d in dirnames if d not in ('.', '..')]
+    if '__init__.py' in filenames:
+        pkg = dirpath.replace(os.path.sep, '.')
+        if os.path.altsep:
+            pkg = pkg.replace(os.path.altsep, '.')
+        packages.append(pkg)
+    elif filenames:
+        prefix = dirpath[len(PACKAGE) + 1:] # Strip package directory + path separator
+        for f in filenames:
+            data_files.append(os.path.join(prefix, f))
+
+setup(
+    version = '0.1.0',
+    description = 'MongoDB cache backend for the Django framework',
+    author = 'Grigoriy Petukhov',
+    author_email = 'lorien@lorien.name',
+    url = 'http://bitbucket.org/lorien/django-mongodb-cache/',
+    name = 'django-mongodb-cache',
+
+    packages = packages,
+    package_data = {'mongodb_cache': data_files},
+
+    license = "BSD",
+    keywords = "django cache mongodb",
+    classifiers=[
+        'Development Status :: 4 - Beta',
+        'Environment :: Web Environment',
+        'Framework :: Django',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: BSD License',
+        'Operating System :: OS Independent',
+        'Programming Language :: Python',
+        'Topic :: Software Development :: Libraries :: Python Modules',
+    ],
+)