Anonymous committed a129b92

mongoengine_rediscache is now on bitbucket.org!


Files changed (31)

+About
+==========
+:Info: Redis-based caching tools for MongoEngine ORM models
+:Author: Michael
+
+mongoengine_rediscache lets you use a cache at the model level and keeps that
+cache up to date when a model changes (save, update, delete).
+It is designed for use with or without Django.
+
+Dependencies
+============
+- pymongo
+- mongoengine
+- python-redis
+- `blinker <http://pypi.python.org/pypi/blinker#downloads>`_
+
+Usage
+=====
+You can define models like this (for an example, see models.py in the "tests" application)::
+
+	from mongoengine import *
+	from mongoengine_rediscache.fields import ReferenceFieldCached, ListFieldCached
+	from mongoengine_rediscache.queryset import CachedQuerySet
+	
+	class TestModelObj(Document):
+	    num  =  IntField(default=0)
+	    name =  StringField(max_length=255, required=False )
+	    text =  StringField(max_length=255, required=False )
+	    create_date = DateTimeField()
+	    
+	    meta = { 'queryset_class': CachedQuerySet }
+	
+	class TestModelList(Document):
+	    num  =  IntField(default=0)
+	    name =  StringField(max_length=255, required=False )
+	    models = ListFieldCached( ReferenceField(TestModelObj) )
+	    
+	    meta = { 'queryset_class': CachedQuerySet }
+	    
+	class TestModelRef(Document):
+	    num  =  IntField(default=0)
+	    name =  StringField(max_length=255, required=False )
+	    model = ReferenceFieldCached(TestModelObj)
+	    
+	    meta = { 'queryset_class': CachedQuerySet }
+	   
+	   
+You may achieve greater efficiency by turning off cascade saves for models with a ReferenceField::
+
+	class TestModelRef(Document):
+	    num  =  IntField(default=0)
+	    name =  StringField(max_length=255, required=False )
+	    model = ReferenceFieldCached(TestModelObj)
+	    
+	    meta = { 'queryset_class': CachedQuerySet, 'cascade' : False }
+	    
+	    
+Make sure 'mongoengine_rediscache' comes after all of your own applications in INSTALLED_APPS::
+
+	INSTALLED_APPS = (
+	    'django.contrib.auth',
+	    'django.contrib.contenttypes',
+	    'django.contrib.sessions',
+	    'django.contrib.sites',
+	    'django.contrib.messages',
+	    'django.contrib.sitemaps',
+	    'django.contrib.staticfiles',
+	    'django.contrib.admin',
+	    'packeris',
+	    'tests',
+	    'mongoengine_rediscache',
+	    'cronis',
+	)
+
+
+Configuration
+=============
+You must also add the following option to your settings (the values are cache timeouts in seconds)::
+
+	MONGOENGINE_REDISCACHE = {
+	    'scheme' : {
+                	'tests.models.TestModelObj'  : { 'list' : 120, 'reference' : 600, 'get' : 600 },
+                	'tests.models.TestModelList' : { 'all' : 600 },
+                	'tests.models.TestModelRef'  : { 'list' : 120, 'reference' : 600, 'get' : 120, 'list_reference' : 600 },
+                	'tests.models.TestModelDict' : { 'list' : 120, 'reference' : 600, 'get' : 120, 'list_reference' : 600 },
+	                },
+	    'redis' : {
+	        'host': 'localhost',
+	        'port': 6379,
+	        'db': 1, 
+	        'socket_timeout': 3,
+	    },
+	    'used' : True,
+	    'keyhashed' : True,
+	}
+
+- `'count' - cache the count() method of CachedQuerySet`
+- `'list' - cache query results in CachedQuerySet; just read the ".cache" property after all of your "filter" and "order_by" calls (see the sketch after this list)`
+- `'reference' - cache lookups made through ReferenceFieldCached`
+- `'get' - cache every get() request in CachedQuerySet`
+- `'list_reference' - cache lookups made through ListFieldCached( ReferenceField(Document) )`
+- `'all' - fallback timeout used for any operation that has no explicit entry`
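+
+For example, with the models above a cached list query and a cached single lookup might look like this (a minimal sketch; "some_pk" stands for a real primary key)::
+
+	# build the queryset first, then read the ".cache" property to get the cached list
+	objs  = TestModelObj.objects.filter(num__lt=500000).order_by("num").cache
+	total = TestModelObj.objects.filter(num__lt=500000).count()  # cached only if a 'count' (or 'all') timeout is configured
+	one   = TestModelObj.objects.get(pk=some_pk)                 # cached under the 'get' (or 'all') timeout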
+
+It is also possible to use it without Django; in that case your code will look like this::
+
+	from mongoengine import *
+	from mongoengine import CASCADE as REF_CASCADE
+	from mongoengine import PULL as REF_PULL
+	from datetime import datetime
+	
+	from mongoengine_rediscache.config import LazySettings
+	from mongoengine_rediscache import install_signals
+	from mongoengine_rediscache.queryset import CachedQuerySet
+	
+	LazySettings.options = {
+	    'scheme' : {
+	                'models.Model1' : { 'all' : 600 },
+	                'models.Model2' : { 'all' : 600 },
+	                'models.Model3' : { 'all' : 600 },
+	                },
+	    'redis' : {
+	        'host': 'localhost',
+	        'port': 6379,
+	        'db'  : 2,
+	        'socket_timeout': 5,
+	    },
+	    'used'      : True,
+	    'keyhashed' : True,
+	}
+	
+	class Model1(Document):
+	    name = StringField(max_length=32)
+	    volume = IntField()
+	    created = DateTimeField(default=datetime.now)
+	    
+	    meta = { 'queryset_class': CachedQuerySet, 'cascade' : False }
+	
+	class Model2(Document):
+	    name = StringField(max_length=32)
+	    count = IntField()
+	    created = DateTimeField(default=datetime.now)
+	    model1 = ReferenceField(Model1, reverse_delete_rule=REF_CASCADE, dbref=False)
+	    
+	    meta = { 'queryset_class': CachedQuerySet, 'cascade' : False }
+	
+	class Model3(Document):
+	    name = StringField(max_length=32)
+	    count = IntField()
+	    created = DateTimeField(default=datetime.now)
+	    model1 = ListField(ReferenceField(Model1, reverse_delete_rule=REF_CASCADE, dbref=False), required=True)
+	    
+	    meta = { 'queryset_class': CachedQuerySet, 'cascade' : False }
+	
+	install_signals()
+
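+Once install_signals() has been called, saving or deleting a document automatically invalidates the related cache entries; a rough sketch of everyday use with the models above::
+
+	m = Model1(name='first', volume=1)
+	m.save()   # post_save drops the cached lists, counts and gets for this collection
+
+	items = Model1.objects.filter(volume__gt=0).cache   # re-cached on the next access under the 'all' timeout
+	one   = Model1.objects.get(pk=m.pk)                 # single-document lookup, also cached
+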
+I think this is all quite simple.
+
+MONGOENGINE_REDISCACHE also contains the option 'keyhashed', which enables hashed cache keys.
+
+If 'keyhashed' is False, the cache key generator will create keys like this::
+
+  1) "test_model_obj:list:_types=TestModelObj|text=regex(ef)|num=$lt=500000|create_date=$gt=1986-11-2207:15:00|((num,1))"
+  2) "test_model_obj:list:text__contains=aa|((num,1))"
+  3) "test_model_obj:list:_types=TestModelObj|text=regex(fe)|num=$lt=500000|((num,1))"
+  4) "test_model_obj:list:name__contains=ee|((name,-1))"
+  5) "test_model_obj:list:_types=TestModelObj|create_date=$gt=1986-11-2207:15:00|name=regex(bb)|((name,-1))"
+
+If 'keyhashed' is True, the key bodies are hidden behind a hash::
+
+  1) "test_model_obj:list:9cc7bcf436afe1db24bb4aaae89f429f"
+  2) "test_model_obj:list:c96fc2fe93b665c8f44dbf1ae4b1dacf"
+  3) "test_model_obj:list:7828697e5b6c1995e3f5d4e336acb30d"
+  4) "test_model_obj:list:b212d48e0a087b249b9701dee2e056c2"
+  5) "test_model_obj:list:8eae9ba432e723cdc43f3399e50ec41f"
+
+This is useful if you run many different queries against a single collection.
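+
+Under the hood the hashed form is simply the MD5 hexdigest of the unhashed key body (see CacheNameMixer in misc.py); an illustrative sketch, not tied to the exact values above::
+
+	import hashlib
+	raw_body = "text__contains=aa|((num,1))"   # the part after "test_model_obj:list:"
+	key = "test_model_obj:list:%s" % hashlib.md5(raw_body).hexdigest()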
+
+And finally
+===========
+Hopefully this will be useful :)
+
+Thanks to Alexander Schepanovski (author of https://github.com/Suor/django-cacheops) for the idea.
+
+Sincerely, Michael Vorotyntsev.
+Server and software:
+OS: Debian 3.1.0-1-amd64 x86_64 GNU/Linux
+CPU: Intel(R) Core(TM)2 Duo T6600 2.20GHz
+mongodb: 2.0.0-2
+Python: 2.7.2+
+Django version: 1.4 alpha
+pymongo: 2.0.1-1
+mongoengine: 0.5.2
+redis-server: 2.4.2-1
+python-redis:  2.4.9-1
+Web-server: nginx 1.1.8-1 + uWSGI 0.9.8.3-debian(64bit)
+
+==Getting object==
+-1-
+Test with used cache:
+Count operations: 15000
+Time operations: 8.84495401382
+Test without cached:
+Count operations: 15000
+Time operations: 19.808797121
+-2-
+Test with used cache:
+Count operations: 15000
+Time operations: 3.99209499359
+Test without cached:
+Count operations: 15000
+Time operations: 18.7795460224
+-3-
+Test with used cache:
+Count operations: 15000
+Time operations: 2.54329681396
+Test without cached:
+Count operations: 15000
+Time operations: 17.7209279537
+
+==Select list==
+-1-
+Test with used cache:
+Count operations: 300
+Object count: 207195
+Time operations: 6.14473700523
+Average list length: 690
+Test without cached:
+Count operations: 300
+Object count: 199122
+Time operations: 27.7501080036
+Average list length: 663
+-2-
+Test with used cache:
+Count operations: 300
+Object count: 200271
+Time operations: 5.51485180855
+Average list length: 667
+Test without cached:
+Count operations: 300
+Object count: 198798
+Time operations: 27.8188531399
+Average list length: 662
+-3-
+Test with used cache:
+Count operations: 300
+Object count: 189700
+Time operations: 5.5311870575
+Average list length: 632
+Test without cached:
+Count operations: 300
+Object count: 180917
+Time operations: 25.7878158092
+Average list length: 603
+
+==Get reference object==
+-1-
+Test with used cache:
+Objects count: 30000
+Time operations: 12.0339980125
+Test without cached:
+Objects count: 30000
+Time operations: 28.2414021492
+-2-
+Test with used cache:
+Objects count: 30000
+Time operations: 7.19947600365
+Test without cached:
+Objects count: 30000
+Time operations: 28.7138521671
+-3-
+Test with used cache:
+Objects count: 30000
+Time operations: 5.26431107521
+Test without cached:
+Objects count: 30000
+Time operations: 27.9019629955
+
+==Get reference list==
+-1-
+Test with used cache:
+Count objects in reference list: 35383
+Time operations: 12.6457750797
+Average list length: 4
+Test without cached:
+Count objects in reference list: 35248
+Time operations: 28.0478198528
+Average list length: 4
+-2-
+Test with used cache:
+Count objects in reference list: 35005
+Time operations: 6.16148686409
+Average list length: 4
+Test without cached:
+Count objects in reference list: 35254
+Time operations: 27.6127989292
+Average list length: 4
+-3-
+Test with used cache:
+Count objects in reference list: 34964
+Time operations: 6.15631604195
+Average list length: 4
+Test without cached:
+Count objects in reference list: 34933
+Time operations: 25.8812391758
+Average list length: 4
+

django_test_application/__init__.py

Empty file added.

django_test_application/manage.py

+#!/usr/bin/env python
+import os, sys
+
+if __name__ == "__main__":
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
+
+    from django.core.management import execute_from_command_line
+
+    execute_from_command_line(sys.argv)

django_test_application/mongoengine_rediscache/__init__.py

+
+def install_signals():
+    from invalidation import CacheInvalidator
+    from config import LazySettings
+    from mongoengine import signals
+    
+    settings = LazySettings()
+    
+    if settings.content:
+        for model_location in settings.scheme:
+            location = model_location.split('.')
+            model = None
+            try:
+                if len(location) == 2:
+                    exec('from %s import %s as model' % (location[0], location[1]))
+                else: # must be 3
+                    exec('from %s.%s import %s as model' % (location[0], location[1], location[2]))
+            except:
+                raise Exception("Can't import document %s from MONGOENGINE_REDISCACHE" % model_location)
+    
+            signals.post_save.connect(CacheInvalidator.post_save, sender=model)
+            signals.post_delete.connect(CacheInvalidator.post_delete, sender=model)
+
+# uncomment for use as Django applications
+#install_signals()

django_test_application/mongoengine_rediscache/base_cache.py

+# -*- coding: utf-8 -*-
+from config import LazySettings
+import cPickle as pickle
+from functools import wraps
+from hashlib import md5 as md5_constructor
+import redis
+
+DEFAULT_TIMEOUT = 600
+
+class BaseCache(object):
+    def cached(self, extra=None, timeout=None):
+        def decorator(func):
+            @wraps(func)
+            def wrapper(*args, **kwargs):
+                md5 = md5_constructor()
+                md5.update('%s.%s' % (func.__module__, func.__name__))
+                if extra is not None:
+                    md5.update(str(extra))
+                if args:
+                    md5.update(repr(args))
+                if kwargs:
+                    md5.update(repr(sorted(kwargs.items())))
+
+                cache_key = 'c:%s' % md5.hexdigest()
+
+                try:
+                    result = self.get(cache_key)
+                except:
+                    result = None
+                if result is None:
+                    # a cache miss (or a Redis error) comes back as None, so
+                    # recompute and store; a cached None is treated as a miss
+                    result = func(*args, **kwargs)
+                    self.set(cache_key, result, timeout)
+                return result
+            return wrapper
+        return decorator
+
+class RedisCache(BaseCache):
+    def __init__(self, conn):
+        self.conn = conn
+
+    def get(self, cache_key):
+        data = self.conn.get(cache_key)
+        if data is None:
+            return None
+        return pickle.loads(data)
+    
+    def pipeline_get(self, cache_key_list):
+        if isinstance(cache_key_list, list) and len(cache_key_list) > 0:
+            pipe = self.conn.pipeline()
+            for key in cache_key_list:
+                pipe.get(key)
+            data = pipe.execute()
+            if data is not None and len(data) > 0:
+                res = []
+                for d in data:
+                    try: obj = pickle.loads(d)
+                    except: obj = None
+                    if obj is not None:
+                        res.append(obj)
+                return res
+        return None
+    
+    def pipeline_delete(self, cache_key_list):
+        if isinstance(cache_key_list, list) and len(cache_key_list) > 0:
+            pipe = self.conn.pipeline()
+            for key in cache_key_list:
+                pipe.delete(key)
+            data = pipe.execute()
+            if data is not None and len(data) > 0:
+                return data
+        return None
+    
+    def delete(self, cache_key):
+        return self.conn.delete(cache_key)
+
+    def set(self, cache_key, data, timeout=DEFAULT_TIMEOUT):
+        if self.conn is None:
+            return
+        pickled_data = pickle.dumps(data)
+        if timeout is not None:
+            self.conn.setex(cache_key, pickled_data, timeout)
+        else:
+            self.conn.set(cache_key, pickled_data)
+    
+    def flushall(self):
+        if self.conn is None:
+            return False
+        try:    self.conn.flushdb()
+        except: return False
+        return True
+    
+    def append_to_list(self, list_cache_key, data):
+        self.conn.rpush(list_cache_key, data)
+    
+    def get_all_list(self, list_cache_key):
+        return  self.conn.lrange(list_cache_key, 0, -1)
+
+class LazyCache(RedisCache):
+    __this = None
+    
+    def __new__(cls):
+        if cls.__this is None:
+            cls.__this = super(LazyCache, cls).__new__(cls)
+        return cls.__this
+
+    def __init__(self):
+        pass
+
+    def lazy_init(self):
+        try:
+            redis_conf = LazySettings().content.get('redis')
+        except AttributeError:
+            raise Exception('Check MONGOENGINE_REDISCACHE in settings. ')
+        try:
+            redis_conn = redis.Redis(**redis_conf)
+        except:
+            redis_conn = None
+        self.__class__.__this = RedisCache(redis_conn)
+
+    def get(self, cache_key):
+        self.lazy_init()
+        return self.__this.get(cache_key)
+    
+    def pipeline_get(self, cache_key_list):
+        self.lazy_init()
+        return self.__this.pipeline_get(cache_key_list)
+    
+    def pipeline_delete(self, cache_key_list):
+        self.lazy_init()
+        return self.__this.pipeline_delete(cache_key_list)
+    
+    def delete(self, cache_key):
+        self.lazy_init()
+        return self.__this.delete(cache_key)
+    
+    def append_to_list(self, list_cache_key, data):
+        self.lazy_init()
+        self.__this.append_to_list(list_cache_key, data)
+    
+    def set(self, cache_key, data, timeout=DEFAULT_TIMEOUT):
+        self.lazy_init()
+        return self.__this.set(cache_key, data, timeout=timeout)
+    
+    def flushall(self):
+        self.lazy_init()
+        return self.__this.flushall()
+    
+    def get_all_list(self, list_cache_key):
+        self.lazy_init()
+        return self.__this.get_all_list(list_cache_key)
+
+_internal_cache = LazyCache()

django_test_application/mongoengine_rediscache/config.py

+'''
+Created on 19.06.2012
+
+@author: unax
+'''
+
+class ClassProperty(object):
+    def __init__(self, getter, setter):
+        self.getter = getter
+        self.setter = setter
+
+    def __get__(self, cls, owner):
+        return getattr(cls, self.getter)()
+
+    def __set__(self, cls, value):
+        getattr(cls, self.setter)(value)
+
+class MetaSettings(type):
+    options = ClassProperty('get_options', 'set_options')
+
+class LazySettings(object):
+    __metaclass__ = MetaSettings
+    __this = None
+    __settings = None
+    __scheme = None
+    __simple_scheme = None
+    __keyhashed = None
+
+    def __new__(cls):
+        if cls.__this is None:
+            cls.__this = super(LazySettings, cls).__new__(cls)
+        return cls.__this
+    
+    def create(self, **options):
+        conf = None
+        if len(options) > 1 and 'redis' in options:
+            conf = options
+        else:
+            try:
+                from django.conf import settings
+                conf = getattr(settings, 'MONGOENGINE_REDISCACHE', None)
+            except:
+                return False
+
+        if conf:
+            self.__class__.__settings  = conf
+            self.__class__.__scheme    = conf.get('scheme')
+            self.__class__.__keyhashed = conf.get('keyhashed')
+            simple_scheme = {}
+            for model_location in conf.get('scheme'):
+                simple_scheme[model_location.split('.')[-1]] = conf['scheme'][model_location]
+            self.__class__.__simple_scheme = simple_scheme
+            return True
+        else:
+            return False
+
+    @classmethod
+    def get_options(cls):
+        return cls().content
+
+    @classmethod
+    def set_options(cls, option_dict):
+        if isinstance(option_dict, dict):
+            cls().create(**option_dict)
+
+    @property
+    def content(self):
+        if self.__settings is None and not self.create():
+            raise Exception('Mongoengine rediscache error! No settings.')
+        return self.__settings
+
+    @property
+    def scheme(self):
+        return self.__scheme
+
+    @property
+    def keyhashed(self):
+        return self.__keyhashed
+
+    @property
+    def simple_scheme(self):
+        return  self.__simple_scheme
+    
+    @classmethod
+    def timelimit(cls, model_name, operation):
+        scheme = cls().simple_scheme.get(model_name)
+        if scheme:
+            timeout = scheme.get(operation)
+            if not isinstance(timeout, int):
+                timeout = scheme.get('all')
+            return timeout

django_test_application/mongoengine_rediscache/fields.py

+'''
+Created on 12.01.2012
+
+@author: unax
+'''
+from mongoengine.fields import ReferenceField, ListField
+from helper import _queryset_list
+from config import LazySettings
+from base_cache import _internal_cache as cache
+from bson.dbref import DBRef
+# for old version: from pymongo.dbref import DBRef
+
+class ListFieldCached(ListField):
+    def __get__(self, instance, owner):
+        if instance is None:
+            return self
+        timeout = LazySettings.timelimit(instance.__class__.__name__, 'list_reference')
+        changed = False
+        if instance.pk is None:
+            changed = True # this is new model
+        else:
+            try:    changed = self.name in instance._changed_fields # this model changed
+            except: pass
+        if (not isinstance(timeout, int)) or changed:
+            return super(ListFieldCached, self).__get__(instance, owner)
+        
+        DBRef_list = instance._data.get(self.name)
+        if isinstance(DBRef_list, _queryset_list):
+            return DBRef_list
+        
+        if DBRef_list and len(DBRef_list) > 0:
+            keys = []
+            list_reference = True
+            for dbref_obj in DBRef_list:
+                if not isinstance(dbref_obj, DBRef):
+                    list_reference = False
+                    break                    
+                keys.append('%s:get:pk=%s' % (dbref_obj.collection , dbref_obj.id))
+            if list_reference:
+                models = cache.pipeline_get(keys)
+                del keys
+                if models is None or len(models) != len(DBRef_list) or changed:
+                    models = super(ListFieldCached, self).__get__(instance, owner)
+                    if models and len(models) > 0:
+                        instance._data[self.name] = _queryset_list()
+                        for obj in models:
+                            if not isinstance(obj, DBRef):
+                                cache.set('%s:get:pk=%s' % (obj._get_collection_name(), obj.pk), obj, timeout)
+                            instance._data[self.name].append(obj)
+                return models
+        return super(ListFieldCached, self).__get__(instance, owner)
+
+class ReferenceFieldCached(ReferenceField):
+    def __get__(self, instance, owner):
+        if instance is None:
+            return self
+        value = instance._data.get(self.name)
+        if isinstance(value, (DBRef)):
+            timeout = LazySettings.timelimit(instance.__class__.__name__ , 'reference')
+            if isinstance(timeout, int):
+                collection = value.collection
+                cache_key = '%s:get:pk=%s' % (collection , value.id)
+                obj = cache.get(cache_key)
+                if obj is None:
+                    obj = super(ReferenceFieldCached, self).__get__(instance, owner)
+                    cache.set(cache_key, obj, timeout)
+                if obj is not None:
+                    instance._data[self.name] = obj
+                return obj
+        return super(ReferenceFieldCached, self).__get__(instance, owner)

django_test_application/mongoengine_rediscache/helper.py

+'''
+Created on 19.06.2012
+
+@author: unax
+'''
+
+class _queryset_list(list):
+    def __init__(self, anylist=None):
+        if anylist is None:
+            super(_queryset_list, self).__init__()
+        else:
+            super(_queryset_list, self).__init__(anylist)
+    
+    def count(self):
+        return len(self)

django_test_application/mongoengine_rediscache/invalidation.py

+'''
+Created on 13.01.2012
+
+@author: unax
+'''
+
+from journal import records
+from base_cache import _internal_cache as cache
+
+def model_change(pk, collection):
+    cache.pipeline_delete(records('list', collection))
+    cache.pipeline_delete(records('count', collection))
+    cache.pipeline_delete(records('get', collection, 'pk=%s' % str(pk)))
+    cache.delete("%s:get:journal:pk=%s" % (collection, str(pk)))
+    cache.delete("%s:list:journal:" % collection)
+    cache.delete("%s:count:journal:" % collection)
+
+class CacheInvalidator:
+    @classmethod
+    def post_save(cls, sender, document, **kwargs):
+        model_change(document.pk, document._get_collection_name())
+                
+    @classmethod
+    def post_delete(cls, sender, document, **kwargs):
+        model_change(document.pk, document._get_collection_name())

django_test_application/mongoengine_rediscache/journal.py

+# -*- coding: utf-8 -*-
+'''
+Created on 12.01.2012
+
+@author: unax
+'''
+from base_cache import _internal_cache as cache
+
+def add_find_record(cache_key, collection, timeout):
+    try:     cache.append_to_list("%s:list:journal:" % collection, cache_key)
+    except:  return
+
+def add_count_record(cache_key, collection, timeout):
+    try:     cache.append_to_list("%s:count:journal:" % collection, cache_key)
+    except:  return
+
+def add_get_record(pk, cache_key, collection, timeout):
+    try:     cache.append_to_list("%s:get:journal:pk=%s" % (collection, str(pk)), cache_key)
+    except:  return
+
+def records(query_type, collection, clarify=''):
+    try:     record_list = cache.get_all_list("%s:%s:journal:%s" % (collection, query_type, clarify))
+    except:  record_list = []
+    if query_type == 'get' and isinstance(record_list, list) and clarify != '':
+        record_list.append('%s:get:%s' % (collection, clarify))
+    return record_list

django_test_application/mongoengine_rediscache/misc.py

+# -*- coding: utf-8 -*-
+'''
+Created on 11.01.2012
+
+@author: unax
+'''
+from mongoengine import Document
+from mongoengine.queryset import QuerySet
+from datetime import datetime
+import hashlib
+from config import LazySettings
+from bson.dbref import DBRef
+# for old version: from pymongo.dbref import DBRef
+
+from re import _pattern_type
+
+class CacheNameMixer(object):
+    __line = None
+
+    def __init__(self, query_dict=None):
+        self.__line = self.__parse(query_dict)
+
+    def __str__(self):
+        if LazySettings().keyhashed:
+            return self.hash
+        return self.__line
+
+    def __unicode__(self):
+        return unicode(self.__line)
+
+    @property
+    def hash(self):
+        md5 = hashlib.md5()
+        md5.update(self.__line)
+        return md5.hexdigest()
+
+    @property
+    def line(self):
+        return str(self)
+
+    @property
+    def exist(self):
+        return self.__line is not None and len(self.__line) > 0
+
+    def __create_str(self, query_obj):
+        if isinstance(query_obj, (unicode, str)):
+            return unicode(query_obj)
+        elif isinstance(query_obj, (int, float)):
+            return str(query_obj)
+        elif isinstance(query_obj, datetime):
+            return query_obj.strftime("%Y-%m-%d %H:%M:%S")
+        elif isinstance(query_obj, Document):
+            return str(query_obj.pk)
+        elif isinstance(query_obj, _pattern_type):
+            return "regex(%s)" % query_obj.pattern
+        elif isinstance(query_obj, dict):
+            return self.__parse(query_obj)
+        elif isinstance(query_obj, DBRef):
+            return "%s.id=%s" % (query_obj.collection, query_obj.id)
+        elif isinstance(query_obj, tuple):
+            return "(%s)" % (",".join([ self.__create_str(obj) for obj in query_obj ]))          
+        elif isinstance(query_obj, list) or isinstance(query_obj, QuerySet):
+            return "[%s]" % (",".join([ self.__create_str(obj) for obj in query_obj ]))
+        else:
+            try:
+                return str(query_obj)
+            except:
+                pass
+        return 'unknown_type'
+
+    def __parse(self, query_dict): # query_dict is dict, list or tuple
+        if isinstance(query_dict, dict) and len(query_dict) > 0:
+            query_line = []
+            for key in query_dict:                
+                query_line.append(u'%s=%s' % (key, self.__create_str(query_dict.get(key))))
+            return (u"|".join(query_line)).encode('utf8')
+        elif isinstance(query_dict, tuple) or isinstance(query_dict, list):
+            return (u"(%s)" % (u",".join([ self.__create_str(key) for key in query_dict ]))).encode('utf8')
+        return None
+
+    def append(self, query_dict):
+        new_line = self.__parse(query_dict).replace(' ', '')
+        if self.__line is not None and new_line is not None:
+            self.__line += '|%s' % new_line
+        elif new_line is not None:
+            self.__line = new_line

django_test_application/mongoengine_rediscache/queryset.py

+# -*- coding: utf-8 -*-
+'''
+Created on 11.01.2012
+@author: unax
+'''
+from mongoengine.queryset import QuerySet
+from misc import CacheNameMixer
+from helper import _queryset_list
+from config import LazySettings
+from base_cache import _internal_cache as cache
+import journal
+
+#================ for mongoengine ====================
+
+class CachedQuerySet(QuerySet):
+    @property
+    def core_cache_name(self):
+        name = CacheNameMixer(self._query)
+        if not name.exist:
+            name.append(('all',))
+        if self._skip:
+            name.append({ 'skip'  : self._skip })
+        if self._limit:
+            name.append({ 'limit' : self._limit })
+        if self._ordering:
+            name.append(self._ordering)
+        return name.line
+ 
+    def count(self):
+        timeout = LazySettings.timelimit(self._document.__name__, 'count')
+        if isinstance(timeout, int):
+            cache_key = "%s:count:%s" % (self._document._get_collection_name(), self.core_cache_name)
+            n = cache.get(cache_key)
+            if n is None:
+                if self._limit == 0:
+                    return 0
+                n = self._cursor.count(with_limit_and_skip=True)
+                cache.set(cache_key, n, timeout)
+                # add in journal
+                journal.add_count_record(cache_key, self._document._get_collection_name() , timeout)
+            del cache_key
+            return n
+        return super(CachedQuerySet, self).count()
+    
+    def get(self, *q_objs, **query):
+        timeout = LazySettings.timelimit(self._document.__name__, 'get')
+        document = None
+        if isinstance(timeout, int):
+            core_cache_name = str(CacheNameMixer(query))
+            cache_key = "%s:get:%s" % (self._document._get_collection_name() , core_cache_name)
+            document = cache.get(cache_key)
+            if document is None:
+                self.__call__(*q_objs, **query)
+                count = super(CachedQuerySet, self).count()
+                if count == 1:
+                    document = self[0]
+                elif count > 1:
+                    raise self._document.MultipleObjectsReturned(u'%d items returned, instead of 1' % count)
+                elif count < 1:
+                    raise self._document.DoesNotExist(u"%s matching query does not exist." % self._document._class_name)
+                cache.set(cache_key, document, timeout)
+                journal.add_get_record(document.pk, cache_key, self._document._get_collection_name(), timeout)
+        else:
+            document = super(CachedQuerySet, self).get(*q_objs, **query)
+        return document
+
+    @property
+    def cache(self):
+        timeout = LazySettings.timelimit(self._document.__name__, 'list')
+        if isinstance(timeout, int):
+            cache_key = "%s:list:%s" % (self._document._get_collection_name(), self.core_cache_name)
+            cached_list = cache.get(cache_key)
+            if cached_list is None:
+            # creating cache
+                cached_list = _queryset_list()
+                if super(CachedQuerySet, self).count() > 0:
+                    for obj in self:
+                        cached_list.append(obj)
+                    cache.set(cache_key, cached_list, timeout)
+                    # add in journal
+                    journal.add_find_record(cache_key, self._document._get_collection_name() , timeout)
+            del cache_key
+            return cached_list
+        return self

django_test_application/nocache_tests/__init__.py

Empty file added.

django_test_application/nocache_tests/models.py

+'''
+Created on 25.01.2012
+
+@author: unax
+'''
+
+from datetime import datetime
+from mongoengine import *
+
+class TestModelObj(Document):
+    num  =  IntField(default=0)
+    name =  StringField(max_length=255, required=False )
+    text =  StringField(max_length=255, required=False )
+    create_date = DateTimeField(default=datetime.now)  # pass the callable, not its result, so the default is evaluated per document
+
+class TestModelList(Document):
+    num  =  IntField(default=0)
+    name =  StringField(max_length=255, required=False )
+    models = ListField( ReferenceField(TestModelObj) )
+    
+class TestModelRef(Document):
+    num  =  IntField(default=0)
+    name =  StringField(max_length=255, required=False )
+    model = ReferenceField(TestModelObj)

django_test_application/settings.py

+# -*- coding: utf-8 -*-
+
+DEBUG = True
+TEMPLATE_DEBUG = DEBUG
+
+ADMINS = ()
+
+MANAGERS = ADMINS
+
+DATABASES = {
+    'default': {
+    'ENGINE': 'django.db.backends.postgresql_psycopg2',
+    'NAME'  : 'testdb',
+    'USER'  : 'admin',
+    'PASSWORD': 'password',
+    'HOST'  : 'localhost',
+    'PORT'  : '5432',
+    },
+    'mongoengine' : {
+    'NAME'  : 'testdb',
+    'USER'  : 'admin',
+    'PASSWORD': 'password',
+    'HOST'  : 'localhost',
+    'PORT'  : 27017,
+    }
+}
+
+from mongoengine import connect
+
+connect(DATABASES['mongoengine'].get('NAME'),
+        username=DATABASES['mongoengine'].get('USER'),
+        password=DATABASES['mongoengine'].get('PASSWORD'),
+        host=DATABASES['mongoengine'].get('HOST'),
+        port=DATABASES['mongoengine'].get('PORT') )
+
+TIME_ZONE = 'Asia/Novosibirsk'
+TIME_INPUT_FORMATS =('%H:%M',)
+DATE_INPUT_FORMATS  =('%H:%M',)
+LANGUAGE_CODE = 'ru'
+SITE_ID = 1
+
+USE_I18N = True
+USE_L10N = True
+USE_TZ = False
+MEDIA_ROOT = ''
+MEDIA_URL = ''
+STATIC_ROOT = ''
+STATIC_URL = '/static/'
+STATICFILES_DIRS = (
+
+)
+
+STATICFILES_FINDERS = (
+    'django.contrib.staticfiles.finders.FileSystemFinder',
+    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
+)
+
+SECRET_KEY = 'j_uptm-k^$wydy!&7inbc4b8e*bmjmnfyl76m^uwn9z)l!vcsa'
+
+TEMPLATE_LOADERS = (
+    'django.template.loaders.app_directories.Loader',
+    'django.template.loaders.filesystem.Loader',
+)
+
+TEMPLATE_CONTEXT_PROCESSORS =(
+                              "django.contrib.auth.context_processors.auth",
+                              "django.core.context_processors.debug",
+                              "django.core.context_processors.i18n",
+                              "django.core.context_processors.media",
+                              "django.core.context_processors.static",
+                              "django.contrib.messages.context_processors.messages"
+                              )
+
+MIDDLEWARE_CLASSES = (
+    'django.middleware.common.CommonMiddleware',
+    'django.contrib.sessions.middleware.SessionMiddleware',
+    'django.middleware.csrf.CsrfViewMiddleware',
+    'django.contrib.auth.middleware.AuthenticationMiddleware',
+    'django.contrib.messages.middleware.MessageMiddleware',
+    'django.middleware.gzip.GZipMiddleware',
+)
+
+
+ROOT_URLCONF = 'urls'
+
+
+
+INSTALLED_APPS = (
+    'django.contrib.auth',
+    'django.contrib.contenttypes',
+    'django.contrib.sessions',
+    'django.contrib.sites',
+    'django.contrib.messages',
+    'django.contrib.staticfiles',
+    'django.contrib.admin',
+    'tests',
+    'mongoengine_rediscache',
+)
+
+LOGGING = {
+    'version': 1,
+    'disable_existing_loggers': False,
+    'filters': {
+        'require_debug_false': {
+            '()': 'django.utils.log.RequireDebugFalse'
+        }
+    },
+    'handlers': {
+        'mail_admins': {
+            'level': 'ERROR',
+            'filters': ['require_debug_false'],
+            'class': 'django.utils.log.AdminEmailHandler'
+        }
+    },
+    'loggers': {
+        'django.request': {
+            'handlers': ['mail_admins'],
+            'level': 'ERROR',
+            'propagate': True,
+        },
+    }
+}
+
+
+CACHE_USE=True
+
+
+MONGOENGINE_REDISCACHE = {
+    'scheme' : {
+                'tests.models.TestModelObj'  : { 'all' : 600 },
+                'tests.models.TestModelList' : { 'all' : 600 },
+                'tests.models.TestModelRef'  : { 'list' : 120, 'reference' : 600, 'get' : 120, 'list_reference' : 600 },
+                },
+    'redis' : {
+        'host': 'localhost',
+        'port': 6379,
+        'db': 1, 
+        'socket_timeout': 3,
+    },
+    'used' : True,
+    'keyhashed' : False,
+}

django_test_application/tests/__init__.py

+# -*- coding: utf-8 -*-
+from settings import MONGOENGINE_REDISCACHE
+from django.core.exceptions import ImproperlyConfigured
+import redis
+#try:
+#    import cPickle as pickle
+#except ImportError:
+import pickle
+    
+from functools import wraps
+import os, time
+from django.utils.hashcompat import md5_constructor
+# redis connecting
+try:
+    redis_conf = MONGOENGINE_REDISCACHE.get('redis')
+except AttributeError:
+    raise ImproperlyConfigured('Check MONGOENGINE_REDISCACHE in settings. ')
+
+try:
+    redis_conn = redis.Redis(**redis_conf)
+except:
+    redis_conn = None
+
+
+
+class _queryset_list(list):
+    def __init__(self, anylist=None):
+        if anylist is None:
+            super(_queryset_list, self).__init__()
+        else:
+            super(_queryset_list, self).__init__(anylist)
+    
+    def count(self):
+        return len(self)
+
+class CacheMiss(Exception):
+    pass
+
+class BaseCache(object):
+    """
+    Simple cache with time-based invalidation
+    """
+    def cached(self, extra=None, timeout=None):
+        """
+        A decorator for caching function calls
+        """
+        def decorator(func):
+            @wraps(func)
+            def wrapper(*args, **kwargs):
+                # Calculating cache key based on func and arguments
+                md5 = md5_constructor()
+                md5.update('%s.%s' % (func.__module__, func.__name__))
+                if extra is not None:
+                    md5.update(str(extra))
+                if args:
+                    md5.update(repr(args))
+                if kwargs:
+                    md5.update(repr(sorted(kwargs.items())))
+
+                cache_key = 'c:%s' % md5.hexdigest()
+
+                try:
+                    result = self.get(cache_key)
+                except CacheMiss:
+                    result = func(*args, **kwargs)
+                    self.set(cache_key, result, timeout)
+
+                return result
+            return wrapper
+        return decorator
+
+class RedisCache(BaseCache):
+    def __init__(self, conn):
+        self.conn = conn
+
+    def get(self, cache_key):
+        if self.conn is None:
+            return None
+        data = self.conn.get(cache_key)
+        if data is None:
+            return None
+        return pickle.loads(data)
+    
+    def pipeline_get(self, cache_key_list ):
+        if isinstance(cache_key_list,list) and len(cache_key_list)>0:
+            pipe = self.conn.pipeline()
+            for key in cache_key_list:
+                pipe.get(key)
+            data=pipe.execute()
+            if data is not None and len(data)>0:
+                res=[]
+                for d in data:
+                    try: obj=pickle.loads(d)
+                    except: obj=None
+                    if obj is not None:
+                        res.append(obj)
+                return res
+        return None
+    
+    def pipeline_delete(self, cache_key_list ):
+        if isinstance(cache_key_list,list) and len(cache_key_list)>0:
+            pipe = self.conn.pipeline()
+            for key in cache_key_list:
+                pipe.delete(key)
+            data=pipe.execute()
+            if data is not None and len(data)>0:
+                return data
+        return None
+    
+    def delete(self, cache_key ):
+        return self.conn.delete(cache_key)
+
+    def set(self, cache_key, data, timeout=None):
+        if self.conn is None:
+            return
+        pickled_data = pickle.dumps(data)
+        if timeout is not None:
+            self.conn.setex(cache_key, pickled_data, timeout)
+        else:
+            self.conn.set(cache_key, pickled_data)
+
+cache = RedisCache(redis_conn)
+_internal_cache = RedisCache(redis_conn)
+
+def install_signals(app):
+    from mongoengine import signals
+    for model_name in MONGOENGINE_REDISCACHE.get('scheme'):
+        try:
+            exec('from %s.models import %s' % (app,model_name))             
+            exec("signals.post_save.connect({0}.post_save, sender={0})".format(model_name) )
+            exec("signals.post_delete.connect({0}.post_delete, sender={0})".format(model_name) )
+        except:
+            pass

django_test_application/tests/main.py

+'''
+Created on 25.01.2012
+
+@author: unax
+'''
+
+from random import Random
+import time
+
+def run_test_get( n=1000, use_cache=True ):
+    if use_cache:
+        from models import TestModelObj
+    else:
+        from nocache_tests.models import TestModelObj
+    
+    all_pk_mobj= [ str(m.pk) for m in TestModelObj.objects() ]
+    rand=Random()
+    sum=0
+    start_time=time.time()
+    for i in range(n):
+        new_obj=TestModelObj.objects.get(pk= rand.choice(all_pk_mobj) )
+        sum+=new_obj.num
+    end_time=time.time()-start_time
+    
+    return "<p>Count operations: %d</p><p>Time operations: %s</p>" % (n, str(end_time))
+    
+
+def run_test_list( n=100, use_cache=True ):
+    if use_cache:
+        from models import TestModelObj
+    else:
+        from nocache_tests.models import TestModelObj
+    obj_count=0
+    from datetime import datetime
+    dt=datetime(1986,11,22,7,15)
+    rand=Random()
+    rand_hexname=lambda rand: "".join([ rand.choice('abcdef') for i in range(2) ])
+    start_time=time.time()
+    m=0
+    for i in range(n):
+        r=rand.randint(1, 15)
+        if rand.randint(1, 10)>5:
+            new_list=TestModelObj.objects.filter(create_date__gt=dt)
+        else:
+            new_list=TestModelObj.objects()
+        if r>12:
+            new_list.filter( num__lt = 500000 ).filter( text__contains = rand_hexname(rand) ).order_by("num")
+        elif r>9:
+            new_list.filter( text__contains = rand_hexname(rand) ).order_by("num")
+        elif r>6:
+            new_list.filter( name__contains = rand_hexname(rand) ).order_by("-name")
+        elif r>3:
+            new_list.filter( num__lt = 500000 ).order_by("-name")
+        else:
+            new_list.filter( num__gt = 500000 ).order_by("-name")
+        
+        if use_cache:
+            new_list=new_list.cache
+        
+        if new_list.count()>0:
+            m+=1
+            sum=0
+            for obj in new_list:
+                sum+=obj.num
+                obj_count+=1
+            
+    end_time=time.time()-start_time
+    
+    return "<p>Count operations: %d</p><p>Object count: %d</p><p>Time operations: %s</p><p>Average list length: %d</p>" % (n, obj_count, str(end_time), int(obj_count/m))
+
+def run_test_get_reference( n=1000, use_cache=True ):
+    if use_cache:
+        from models import TestModelRef
+    else:
+        from nocache_tests.models import TestModelRef
+    all_pk_mobj= [ str(m.pk) for m in TestModelRef.objects() ]
+    rand=Random()
+    sum=0
+    start_time=time.time()
+    for i in range(n):
+        new_obj=TestModelRef.objects.get(pk=rand.choice(all_pk_mobj) )
+        ref_obj=new_obj.model
+        sum+=ref_obj.num
+    end_time=time.time()-start_time
+    
+    return "<p>Objects count: %d</p><p>Time operations: %s</p>" % (n*2, str(end_time))
+
+def run_test_list_reference( n=1000, use_cache=True ):
+    if use_cache:
+        from models import TestModelList
+    else:
+        from nocache_tests.models import TestModelList
+    all_pk_mobj= [ str(m.pk) for m in TestModelList.objects() ]
+    rand=Random()
+    sum=0
+    obj_count=0
+    m=0
+    start_time=time.time()
+    for i in range(n):
+        new_obj=TestModelList.objects.get(pk=rand.choice(all_pk_mobj) )
+        sum=0
+        for obj in new_obj.models:
+            sum+=obj.num
+            m+=1
+        if sum>0:
+            obj_count+=1
+    end_time=time.time()-start_time
+    
+    return "<p>Count objects in reference list: %d</p><p>Time operations: %s</p><p>Average list length: %d</p>" % (m, str(end_time), int(m/obj_count))

django_test_application/tests/models.py

+'''
+Created on 24.01.2012
+
+@author: unax
+'''
+
+from datetime import datetime
+from mongoengine import *
+from mongoengine_rediscache.fields import ReferenceFieldCached, ListFieldCached
+from mongoengine_rediscache.queryset import CachedQuerySet
+
+class TestModelObj(Document):
+    num  =  IntField(default=0)
+    name =  StringField(max_length=255, required=False )
+    text =  StringField(max_length=255, required=False )
+    create_date = DateTimeField(default=datetime.now)  # pass the callable, not its result, so the default is evaluated per document
+    
+    meta = { 'queryset_class': CachedQuerySet, 'cascade' : False }
+
+class TestModelList(Document):
+    num  =  IntField(default=0)
+    name =  StringField(max_length=255, required=False )
+    models = ListFieldCached( ReferenceField(TestModelObj) )
+    
+    meta = { 'queryset_class': CachedQuerySet, 'cascade' : False }
+    
+class TestModelRef(Document):
+    num  =  IntField(default=0)
+    name =  StringField(max_length=255, required=False )
+    model = ReferenceFieldCached(TestModelObj)
+    
+    meta = { 'queryset_class': CachedQuerySet, 'cascade' : False }
+    

django_test_application/tests/views.py

+'''
+Created on 24.01.2012
+
+@author: unax
+'''
+from django.http import HttpResponse
+from models import TestModelList, TestModelObj, TestModelRef
+from random import Random
+from main import run_test_get, run_test_list, run_test_get_reference, run_test_list_reference
+
+def main_page(request):
+    return HttpResponse("""<h2>Tests for mongoengine_rediscache</h2>
+    <ul>
+    <li><a href="/create/2000">Create test collections (4000 objects in total)</a></li>
+    <p><b>Run the tests. For a meaningful comparison, run each test more than once.</b></p>
+    <li><a href="/test_get">Getting objects (TestModelObj.objects.get)</a></li>
+    <li><a href="/test_list">Getting object lists (tests CachedQuerySet with various combinations of filter(...) and order_by())</a></li>
+    <li><a href="/test_get_reference">Getting objects from reference field (ReferenceFieldCached(Document))</a></li>
+    <li><a href="/test_list_reference">Getting list of reference objects (ListFieldCached( ReferenceField(Document) ))</a></li>
+    </ul>""")
+
+def test_get(request):
+    html='<p>Getting object</p>'
+    
+    html+='<ul><b>Test with used cache:</b> %s</ul>' % run_test_get(15000,True)
+    html+='<ul><b>Test without cached:</b> %s</ul>'  % run_test_get(15000,False)
+    
+    return HttpResponse(html)
+
+def test_list(request):
+    html='<p>Select list</p>'
+
+    html+='<ul><b>Test with used cache:</b> %s</ul>' % run_test_list(300,True)
+    html+='<ul><b>Test without cached:</b> %s</ul>'  % run_test_list(300,False)
+    
+    return HttpResponse(html)
+
+
+def test_get_reference(request):
+    html='<p>Get reference object</p>'
+
+    html+='<ul><b>Test with used cache:</b> %s</ul>' % run_test_get_reference(15000,True)
+    html+='<ul><b>Test without cached:</b> %s</ul>'  % run_test_get_reference(15000,False)
+    
+    return HttpResponse(html)
+
+def test_list_reference(request):
+    html='<p>Get reference list</p>'
+
+    html+='<ul><b>Test with used cache:</b> %s</ul>' % run_test_list_reference(10000,True)
+    html+='<ul><b>Test without cached:</b> %s</ul>'  % run_test_list_reference(10000,False)
+    
+    return HttpResponse(html)
+
+def create_models(request, count_models):
+    count_models=int(count_models)
+    
+    rand=Random()
+    
+    rand_hextext=lambda rand: "".join([ rand.choice('0123456789abcdef') for i in range(rand.randint(8,16)) ])
+    rand_hexname=lambda rand: "".join([ rand.choice('_abcdef') for i in range(rand.randint(2,4)) ])
+    
+    nums=[]
+    all=0
+    for i in range(count_models):
+        all+=1
+        n=rand.randint(0,1000000)
+        nums.append(n)
+        TestModelObj(num=n,
+                    name=rand_hexname(rand),
+                    text=rand_hextext(rand)).save()
+
+    for i in range(count_models/2):
+        all+=1
+        TestModelRef(num=rand.randint(0,1000000),
+                     model=TestModelObj.objects(num=rand.choice(nums))[0],
+                     name=rand_hexname(rand)).save()
+    
+    for i in range(count_models/2):
+        all+=1
+        TestModelList(num=rand.randint(0,1000000),
+                     models=TestModelObj.objects.filter(num__in= [ rand.choice(nums) for i in range( rand.randint(1,8) ) ]),
+                     name=rand_hexname(rand)).save()
+    return HttpResponse('Created %d models' % all)

django_test_application/urls.py

+from django.conf.urls import patterns, include, url
+from django.contrib import admin
+admin.autodiscover()
+
+urlpatterns = patterns('',
+    url( r'^create/(?P<count_models>[0-9]{1,9})[/]{0,1}','tests.views.create_models', ),
+    url( r'^test_get[/]{0,1}$', 'tests.views.test_get' ),
+    url( r'^test_list[/]{0,1}$', 'tests.views.test_list' ),
+    url( r'^test_get_reference[/]{0,1}$', 'tests.views.test_get_reference' ),
+    url( r'^test_list_reference[/]{0,1}$', 'tests.views.test_list_reference' ),
+    url( r'^admin[/]{0,1}', include(admin.site.urls)),
+    url( r'^[/]{0,1}', 'tests.views.main_page' ),
+)

django_test_application/webapp_uwsgi.py

+# -*- coding: utf-8 -*-
+import  os,sys
+sys.path.append('/usr/local/lib/python2.7/dist-packages/django/')
+sys.path.append('/data/web/cache_test/cachetest')
+os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
+import django.core.handlers.wsgi
+application = django.core.handlers.wsgi.WSGIHandler()

mongoengine_rediscache/__init__.py

+
+def install_signals():
+    from invalidation import CacheInvalidator
+    from config import LazySettings
+    from mongoengine import signals
+    
+    settings = LazySettings()
+    
+    if settings.content:
+        for model_location in settings.scheme:
+            location = model_location.split('.')
+            model = None
+            try:
+                if len(location) == 2:
+                    exec('from %s import %s as model' % (location[0], location[1]))
+                else: # must be 3
+                    exec('from %s.%s import %s as model' % (location[0], location[1], location[2]))
+            except:
+                raise Exception("Can't import document %s from MONGOENGINE_REDISCACHE" % model_location)
+    
+            signals.post_save.connect(CacheInvalidator.post_save, sender=model)
+            signals.post_delete.connect(CacheInvalidator.post_delete, sender=model)
+
+# uncomment for use as Django applications
+#install_signals()

mongoengine_rediscache/base_cache.py

+# -*- coding: utf-8 -*-
+from config import LazySettings
+import cPickle as pickle
+from functools import wraps
+from hashlib import md5 as md5_constructor
+import redis
+
+DEFAULT_TIMEOUT = 600
+
+class BaseCache(object):
+    def cached(self, extra=None, timeout=None):
+        def decorator(func):
+            @wraps(func)
+            def wrapper(*args, **kwargs):
+                md5 = md5_constructor()
+                md5.update('%s.%s' % (func.__module__, func.__name__))
+                if extra is not None:
+                    md5.update(str(extra))
+                if args:
+                    md5.update(repr(args))
+                if kwargs:
+                    md5.update(repr(sorted(kwargs.items())))
+
+                cache_key = 'c:%s' % md5.hexdigest()
+
+                try:
+                    result = self.get(cache_key)
+                except:
+                    result = None
+                if result is None:
+                    # a cache miss (or a Redis error) comes back as None, so
+                    # recompute and store; a cached None is treated as a miss
+                    result = func(*args, **kwargs)
+                    self.set(cache_key, result, timeout)
+                return result
+            return wrapper
+        return decorator
+
+class RedisCache(BaseCache):
+    def __init__(self, conn):
+        self.conn = conn
+
+    def get(self, cache_key):
+        data = self.conn.get(cache_key)
+        if data is None:
+            return None
+        return pickle.loads(data)
+    
+    def pipeline_get(self, cache_key_list):
+        if isinstance(cache_key_list, list) and len(cache_key_list) > 0:
+            pipe = self.conn.pipeline()
+            for key in cache_key_list:
+                pipe.get(key)
+            data = pipe.execute()
+            if data is not None and len(data) > 0:
+                res = []
+                for d in data:
+                    try: obj = pickle.loads(d)
+                    except: obj = None
+                    if obj is not None:
+                        res.append(obj)
+                return res
+        return None
+    
+    def pipeline_delete(self, cache_key_list):
+        if isinstance(cache_key_list, list) and len(cache_key_list) > 0:
+            pipe = self.conn.pipeline()
+            for key in cache_key_list:
+                pipe.delete(key)
+            data = pipe.execute()
+            if data is not None and len(data) > 0:
+                return data
+        return None
+    
+    def delete(self, cache_key):
+        return self.conn.delete(cache_key)
+
+    def set(self, cache_key, data, timeout=DEFAULT_TIMEOUT):
+        if self.conn is None:
+            return
+        pickled_data = pickle.dumps(data)
+        if timeout is not None:
+            self.conn.setex(cache_key, pickled_data, timeout)
+        else:
+            self.conn.set(cache_key, pickled_data)
+    
+    def flushall(self):
+        if self.conn is None:
+            return False
+        try:    self.conn.flushdb()
+        except: return False
+        return True
+    
+    def append_to_list(self, list_cache_key, data):
+        self.conn.rpush(list_cache_key, data)
+    
+    def get_all_list(self, list_cache_key):
+        return  self.conn.lrange(list_cache_key, 0, -1)
+
+class LazyCache(RedisCache):
+    __this = None
+    
+    def __new__(cls):
+        if cls.__this is None:
+            cls.__this = super(LazyCache, cls).__new__(cls)
+        return cls.__this
+
+    def __init__(self):
+        pass
+
+    def lazy_init(self):
+        try:
+            redis_conf = LazySettings().content.get('redis')
+        except AttributeError:
+            raise Exception('Check MONGOENGINE_REDISCACHE in settings. ')
+        try:
+            redis_conn = redis.Redis(**redis_conf)
+        except:
+            redis_conn = None
+        self.__class__.__this = RedisCache(redis_conn)
+
+    def get(self, cache_key):
+        self.lazy_init()
+        return self.__this.get(cache_key)
+    
+    def pipeline_get(self, cache_key_list):
+        self.lazy_init()
+        return self.__this.pipeline_get(cache_key_list)
+    
+    def pipeline_delete(self, cache_key_list):
+        self.lazy_init()
+        return self.__this.pipeline_delete(cache_key_list)
+    
+    def delete(self, cache_key):
+        self.lazy_init()
+        return self.__this.delete(cache_key)
+    
+    def append_to_list(self, list_cache_key, data):
+        self.lazy_init()
+        self.__this.append_to_list(list_cache_key, data)
+    
+    def set(self, cache_key, data, timeout=DEFAULT_TIMEOUT):
+        self.lazy_init()
+        return self.__this.set(cache_key, data, timeout=timeout)
+    
+    def flushall(self):
+        self.lazy_init()
+        return self.__this.flushall()
+    
+    def get_all_list(self, list_cache_key):
+        self.lazy_init()
+        return self.__this.get_all_list(list_cache_key)
+
+_internal_cache = LazyCache()

mongoengine_rediscache/config.py

+'''
+Created on 19.06.2012
+
+@author: unax
+'''
+
+class ClassProperty(object):
+    def __init__(self, getter, setter):
+        self.getter = getter
+        self.setter = setter
+
+    def __get__(self, cls, owner):
+        return getattr(cls, self.getter)()
+
+    def __set__(self, cls, value):
+        getattr(cls, self.setter)(value)
+
+class MetaSettings(type):
+    options = ClassProperty('get_options', 'set_options')
+
+class LazySettings(object):
+    __metaclass__ = MetaSettings
+    __this = None
+    __settings = None
+    __scheme = None
+    __simple_scheme = None
+    __keyhashed = None
+
+    def __new__(cls):
+        if cls.__this is None:
+            cls.__this = super(LazySettings, cls).__new__(cls)
+        return cls.__this
+    
+    def create(self, **options):
+        conf = None
+        if len(options) > 1 and 'redis' in options:
+            conf = options
+        else:
+            try:
+                from django.conf import settings
+                conf = getattr(settings, 'MONGOENGINE_REDISCACHE', None)
+            except:
+                return False
+
+        if conf:
+            self.__class__.__settings  = conf
+            self.__class__.__scheme    = conf.get('scheme')
+            self.__class__.__keyhashed = conf.get('keyhashed')
+            simple_scheme = {}
+            for model_location in conf.get('scheme'):
+                simple_scheme[model_location.split('.')[-1]] = conf['scheme'][model_location]
+            self.__class__.__simple_scheme = simple_scheme
+            return True
+        else:
+            return False
+
+    @classmethod
+    def get_options(cls):
+        return cls().content
+
+    @classmethod
+    def set_options(cls, option_dict):
+        if isinstance(option_dict, dict):
+            cls().create(**option_dict)
+
+    @property
+    def content(self):
+        if self.__settings is None and not self.create():
+            raise Exception('Mongoengine rediscache error! No settings.')
+        return self.__settings
+
+    @property
+    def scheme(self):
+        return self.__scheme
+
+    @property
+    def keyhashed(self):
+        return self.__keyhashed
+
+    @property
+    def simple_scheme(self):
+        return  self.__simple_scheme
+    
+    @classmethod
+    def timelimit(cls, model_name, operation):
+        scheme = cls().simple_scheme.get(model_name)
+        if scheme:
+            timeout = scheme.get(operation)
+            if not isinstance(timeout, int):
+                timeout = scheme.get('all')
+            return timeout

mongoengine_rediscache/fields.py

+'''
+Created on 12.01.2012
+
+@author: unax
+'''
+from mongoengine.fields import ReferenceField, ListField
+from helper import _queryset_list
+from config import LazySettings
+from base_cache import _internal_cache as cache
+from bson.dbref import DBRef
+# for old version: from pymongo.dbref import DBRef
+
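+# Both fields cache the referenced documents under keys of the form
+# "<collection>:get:pk=<id>"; CacheInvalidator deletes exactly these keys
+# when the referenced document is saved or deleted.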
+class ListFieldCached(ListField):
+    def __get__(self, instance, owner):
+        if instance is None:
+            return self
+        timeout = LazySettings.timelimit(instance.__class__.__name__, 'list_reference')
+        changed = False
+        if instance.pk is None:
+            changed = True # this is new model
+        else:
+            try:    changed = self.name in instance._changed_fields # this model changed
+            except: pass
+        if (not isinstance(timeout, int)) or changed:
+            return super(ListFieldCached, self).__get__(instance, owner)
+        
+        DBRef_list = instance._data.get(self.name)
+        if isinstance(DBRef_list, _queryset_list):
+            return DBRef_list
+        
+        if DBRef_list and len(DBRef_list) > 0:
+            keys = []
+            list_reference = True
+            for dbref_obj in DBRef_list:
+                if not isinstance(dbref_obj, DBRef):
+                    list_reference = False
+                    break                    
+                keys.append('%s:get:pk=%s' % (dbref_obj.collection , dbref_obj.id))
+            if list_reference:
+                models = cache.pipeline_get(keys)
+                del keys
+                if models is None or len(models) != len(DBRef_list) or changed:
+                    models = super(ListFieldCached, self).__get__(instance, owner)
+                    if models and len(models) > 0:
+                        instance._data[self.name] = _queryset_list()
+                        for obj in models:
+                            if not isinstance(obj, DBRef):
+                                cache.set('%s:get:pk=%s' % (obj._get_collection_name(), obj.pk), obj, timeout)
+                            instance._data[self.name].append(obj)
+                return models
+        return super(ListFieldCached, self).__get__(instance, owner)
+
+class ReferenceFieldCached(ReferenceField):
+    def __get__(self, instance, owner):
+        if instance is None:
+            return self
+        value = instance._data.get(self.name)
+        if isinstance(value, (DBRef)):
+            timeout = LazySettings.timelimit(instance.__class__.__name__ , 'reference')
+            if isinstance(timeout, int):
+                collection = value.collection
+                cache_key = '%s:get:pk=%s' % (collection , value.id)
+                obj = cache.get(cache_key)
+                if obj is None:
+                    obj = super(ReferenceFieldCached, self).__get__(instance, owner)
+                    cache.set(cache_key, obj, timeout)
+                if obj is not None:
+                    instance._data[self.name] = obj
+                return obj
+        return super(ReferenceFieldCached, self).__get__(instance, owner)

mongoengine_rediscache/helper.py

+'''
+Created on 19.06.2012
+
+@author: unax
+'''
+
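+# A plain list that exposes count() like a QuerySet, so cached result lists
+# can be returned in place of a real queryset.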
+class _queryset_list(list):
+    def __init__(self, anylist=None):
+        if anylist is None:
+            super(_queryset_list, self).__init__()
+        else:
+            super(_queryset_list, self).__init__(anylist)
+    
+    def count(self):
+        return len(self)

mongoengine_rediscache/invalidation.py

+'''
+Created on 13.01.2012
+
+@author: unax
+'''
+
+from journal import records
+from base_cache import _internal_cache as cache
+
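+# Drop every cached "list", "count" and "get" entry recorded in the
+# per-collection journals (see journal.py), then clear the journals themselves.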
+def model_change(pk, collection):
+    cache.pipeline_delete(records('list', collection))
+    cache.pipeline_delete(records('count', collection))
+    cache.pipeline_delete(records('get', collection, 'pk=%s' % str(pk)))
+    cache.delete("%s:get:journal:pk=%s" % (collection, str(pk)))
+    cache.delete("%s:list:journal:" % collection)
+    cache.delete("%s:count:journal:" % collection)
+
+class CacheInvalidator:
+    @classmethod
+    def post_save(cls, sender, document, **kwargs):
+        model_change(document.pk, document._get_collection_name())
+                
+    @classmethod
+    def post_delete(cls, sender, document, **kwargs):
+        model_change(document.pk, document._get_collection_name())
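+# A sketch of how these handlers are connected to mongoengine's blinker-based
+# signals (the actual hook-up is presumed to live elsewhere in the package):
+#
+#     from mongoengine import signals
+#     signals.post_save.connect(CacheInvalidator.post_save)
+#     signals.post_delete.connect(CacheInvalidator.post_delete)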

mongoengine_rediscache/journal.py

+# -*- coding: utf-8 -*-
+'''
+Created on 12.01.2012
+
+@author: unax
+'''
+from base_cache import _internal_cache as cache
+
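+# Each collection keeps journal lists of the cache keys created for it
+# ("<collection>:list:journal:", "<collection>:count:journal:" and
+# "<collection>:get:journal:pk=<id>") so that model_change() in
+# invalidation.py can delete every related entry in one pass.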
+def add_find_record(cache_key, collection, timeout):
+    try:     cache.append_to_list("%s:list:journal:" % collection, cache_key)
+    except:  return
+
+def add_count_record(cache_key, collection, timeout):
+    try:     cache.append_to_list("%s:count:journal:" % collection, cache_key)
+    except:  return
+
+def add_get_record(pk, cache_key, collection, timeout):
+    try:     cache.append_to_list("%s:get:journal:pk=%s" % (collection, str(pk)), cache_key)
+    except:  return
+
+def records(query_type, collection, clarify=''):
+    try:     record_list = cache.get_all_list("%s:%s:journal:%s" % (collection, query_type, clarify))
+    except:  record_list = []
+    if query_type == 'get' and isinstance(record_list, list) and clarify != '':
+        record_list.append('%s:get:%s' % (collection, clarify))
+    return record_list

mongoengine_rediscache/misc.py

+# -*- coding: utf-8 -*-
+'''
+Created on 11.01.2012
+
+@author: unax
+'''
+from mongoengine import Document
+from mongoengine.queryset import QuerySet
+from datetime import datetime
+import hashlib
+from config import LazySettings
+from bson.dbref import DBRef
+# for old version: from pymongo.dbref import DBRef
+
+from re import _pattern_type
+
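+# Builds a string representation of a query dict / list / tuple for use as
+# part of a cache key; when the "keyhashed" option is enabled, str() returns
+# the md5 of that string instead.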
+class CacheNameMixer(object):
+    __line = None
+
+    def __init__(self, query_dict=None):
+        self.__line = self.__parse(query_dict)
+
+    def __str__(self):
+        if LazySettings().keyhashed:
+            return self.hash
+        return self.__line
+
+    def __unicode__(self):
+        return unicode(self.__line)
+
+    @property
+    def hash(self):
+        md5 = hashlib.md5()
+        md5.update(self.__line)
+        return md5.hexdigest()
+
+    @property
+    def line(self):
+        return str(self)
+
+    @property
+    def exist(self):
+        return self.__line is not None and len(self.__line) > 0
+
+    def __create_str(self, query_obj):
+        if isinstance(query_obj, (unicode, str)):
+            return unicode(query_obj)
+        elif isinstance(query_obj, (int, float)):
+            return str(query_obj)
+        elif isinstance(query_obj, datetime):
+            return query_obj.strftime("%Y-%m-%d %H:%M:%S")
+        elif isinstance(query_obj, Document):
+            return str(query_obj.pk)
+        elif isinstance(query_obj, _pattern_type):
+            return "regex(%s)" % query_obj.pattern
+        elif isinstance(query_obj, dict):
+            return self.__parse(query_obj)
+        elif isinstance(query_obj, DBRef):
+            return "%s.id=%s" % (query_obj.collection, query_obj.id)
+        elif isinstance(query_obj, tuple):
+            return "(%s)" % (",".join([ self.__create_str(obj) for obj in query_obj ]))          
+        elif isinstance(query_obj, list) or isinstance(query_obj, QuerySet):
+            return "[%s]" % (",".join([ self.__create_str(obj) for obj in query_obj ]))
+        else:
+            try:
+                return str(query_obj)
+            except:
+                pass
+        return 'unknown_type'
+
+    def __parse(self, query_dict): # query_dict is dict, list or tuple
+        if isinstance(query_dict, dict) and len(query_dict) > 0:
+            query_line = []
+            for key in query_dict:                
+                query_line.append(u'%s=%s' % (key, self.__create_str(query_dict.get(key))))
+            return (u"|".join(query_line)).encode('utf8')
+        elif isinstance(query_dict, tuple) or isinstance(query_dict, list):
+            return (u"(%s)" % (u",".join([ self.__create_str(key) for key in query_dict ]))).encode('utf8')
+        return None
+
+    def append(self, query_dict):
+        new_line = self.__parse(query_dict).replace(' ', '')
+        if self.__line is not None and new_line is not None:
+            self.__line += '|%s' % new_line
+        elif new_line is not None:
+            self.__line = new_line
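+# Example (hypothetical query): CacheNameMixer({'num': 5, 'name': 'abc'}).line
+# gives "num=5|name=abc" (dict key order is not guaranteed in Python 2), or the
+# md5 hex digest of that string when "keyhashed" is enabled.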

mongoengine_rediscache/queryset.py

+# -*- coding: utf-8 -*-
+'''
+Created on 11.01.2012
+@author: unax
+'''
+from mongoengine.queryset import QuerySet
+from misc import CacheNameMixer
+from helper import _queryset_list
+from config import LazySettings
+from base_cache import _internal_cache as cache
+import journal
+
+#================ for mongoengine ====================
+
+class CachedQuerySet(QuerySet):
+    @property
+    def core_cache_name(self):
+        name = CacheNameMixer(self._query)
+        if not name.exist:
+            name.append(('all',))
+        if self._skip:
+            name.append({ 'skip'  : self._skip })
+        if self._limit:
+            name.append({ 'limit' : self._limit })
+        if self._ordering:
+            name.append(self._ordering)
+        return name.line
+ 
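+    # count() and get() consult LazySettings.timelimit() for this model and
+    # fall back to the plain QuerySet behaviour when no timeout is configured;
+    # cache keys they create are also recorded in the per-collection journal
+    # so they can be invalidated later.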
+    def count(self):
+        timeout = LazySettings.timelimit(self._document.__name__, 'count')
+        if isinstance(timeout, int):
+            cache_key = "%s:count:%s" % (self._document._get_collection_name(), self.core_cache_name)
+            n = cache.get(cache_key)
+            if n is None:
+                if self._limit == 0:
+                    return 0
+                n = self._cursor.count(with_limit_and_skip=True)
+                cache.set(cache_key, n, timeout)
+                # add in journal
+                journal.add_count_record(cache_key, self._document._get_collection_name() , timeout)
+            del cache_key
+            return n
+        return super(CachedQuerySet, self).count()
+    
+    def get(self, *q_objs, **query):
+        timeout = LazySettings.timelimit(self._document.__name__, 'get')
+        document = None
+        if isinstance(timeout, int):
+            core_cache_name = str(CacheNameMixer(query))
+            cache_key = "%s:get:%s" % (self._document._get_collection_name() , core_cache_name)
+            document = cache.get(cache_key)
+            if document is None:
+                self.__call__(*q_objs, **query)
+                count = super(CachedQuerySet, self).count()
+                if count == 1:
+