Commits

Anonymous committed 3ab7c3f

Version 2! It is better now — see the README.

  • Participants
  • Parent commits 9562e3a

Comments (0)

Files changed (16)

File django_test_application/mongoengine_rediscache/__init__.py

+VERSION = (2,0,1)
 
 def install_signals():
     from invalidation import CacheInvalidator

File django_test_application/mongoengine_rediscache/base_cache.py

         if data is None:
             return None
         return pickle.loads(data)
-    
+
     def pipeline_get(self, cache_key_list):
-        if isinstance(cache_key_list, list) and len(cache_key_list) > 0:
+        if cache_key_list:
             pipe = self.conn.pipeline()
             for key in cache_key_list:
                 pipe.get(key)
             data = pipe.execute()
-            if data is not None and len(data) > 0:
-                res = []
-                for d in data:
-                    try: obj = pickle.loads(d)
-                    except: obj = None
-                    if obj is not None:
-                        res.append(obj)
-                return res
+            if data:
+                return [ pickle.loads(d) for d in data if d ]
         return None
-    
+
     def pipeline_delete(self, cache_key_list):
         if isinstance(cache_key_list, list) and len(cache_key_list) > 0:
             pipe = self.conn.pipeline()
             for key in cache_key_list:
                 pipe.delete(key)
             data = pipe.execute()
-            if data is not None and len(data) > 0:
+            if data:
                 return data
         return None
-    
+
     def delete(self, cache_key):
         return self.conn.delete(cache_key)
 
             self.conn.setex(cache_key, pickled_data, timeout)
         else:
             self.conn.set(cache_key, pickled_data)
-    
+            
+    def set_int(self, cache_key, data, timeout=DEFAULT_TIMEOUT):
+        if not isinstance(data, int):
+            return
+        self.conn.setex(cache_key, data, timeout)
+
+    def get_int(self, cache_key):
+        try:    return int(self.conn.get(cache_key))
+        except: return None
+
+    def incr(self, name, amount=1):
+        self.conn.incr(name, amount)
+
     def flushall(self):
         if self.conn is None:
             return False
         try:    self.conn.flushdb()
         except: return False
         return True
-    
+
     def append_to_list(self, list_cache_key, data):
         self.conn.rpush(list_cache_key, data)
-    
+
     def get_all_list(self, list_cache_key):
         return  self.conn.lrange(list_cache_key, 0, -1)
 
                 cls.set             = cls.__cahe.set
                 cls.flushall        = cls.__cahe.flushall
                 cls.get_all_list    = cls.__cahe.get_all_list
+                cls.incr            = cls.__cahe.incr
+                cls.set_int         = cls.__cahe.set_int
+                cls.get_int         = cls.__cahe.get_int 
 
         if cls.__this is None:
             cls.__this = super(LazyCache, cls).__new__(cls)
     def set(self, *args, **kwargs):
         LazyCache()
 
+    def set_int(self, *args, **kwargs):
+        LazyCache()
+
+    def get_int(self, *args, **kwargs):
+        LazyCache()
+
     def flushall(self, *args, **kwargs):
         LazyCache()
 
     def get_all_list(self, *args, **kwargs):
         LazyCache()
+    
+    def incr(self, *args, **kwargs):
+        LazyCache()
 
 _internal_cache = LazyCache()

File django_test_application/mongoengine_rediscache/config.py

 @author: unax
 '''
 
+ABSOLUTE_VERSION_LIMIT = 4294967294 # is possible theoretically
+
+posible_options = ('list',
+                   'reference',
+                   'get',
+                   'list_reference',
+                   'count')
+
 class ClassProperty(object):
     def __init__(self, getter, setter):
         self.getter = getter
     __this = None
     __settings = None
     __scheme = None
-    __simple_scheme = None
     __keyhashed = None
 
     def __new__(cls):
                 return False
 
         if conf:
+            scheme = conf.get('scheme')
             self.__class__.__settings  = conf
-            self.__class__.__scheme    = conf.get('scheme')
+            for model in scheme:
+                for_all = scheme[model].get('all')
+                if isinstance(for_all, int):
+                    for key in posible_options:
+                        scheme[model][key] = for_all
             self.__class__.__keyhashed = conf.get('keyhashed')
-            simple_scheme = {}
-            for model_location in conf.get('scheme'):
-                simple_scheme[model_location.split('.')[-1]] = conf['scheme'][model_location]
-            self.__class__.__simple_scheme = simple_scheme
+            if conf.get('used'):
+                self.__class__.__scheme = scheme
+            else:
+                self.__class__.__scheme = {}
             return True
         else:
             return False
     def keyhashed(self):
         return self.__keyhashed
 
-    @property
-    def simple_scheme(self):
-        return  self.__simple_scheme
-    
     @classmethod
     def timelimit(cls, model_name, operation):
         scheme = cls().simple_scheme.get(model_name)

File django_test_application/mongoengine_rediscache/fields.py

 
 @author: unax
 '''
-from mongoengine.fields import ReferenceField, ListField
+from mongoengine import (
+    ReferenceField,
+    ListField,
+    Document)
 from helper import _queryset_list
-from config import LazySettings
 from base_cache import _internal_cache as cache
 from bson.dbref import DBRef
-# for old version: from pymongo.dbref import DBRef
+from config import LazySettings
+from misc import CacheNameMixer
+
+def _get_timeout(instance, operaton):
+    scheme = LazySettings().scheme.get('%s.%s' % (instance.__module__, instance.__class__.__name__))
+    if scheme and operaton in scheme:
+        return int(scheme.get(operaton))
+    return None
 
 class ListFieldCached(ListField):
     def __get__(self, instance, owner):
         if instance is None:
             return self
-        timeout = LazySettings.timelimit(instance.__class__.__name__, 'list_reference')
-        changed = False
-        if instance.pk is None:
-            changed = True # this is new model
-        else:
-            try:    changed = self.name in instance._changed_fields # this model changed
-            except: pass
-        if (not isinstance(timeout, int)) or changed:
-            return super(ListFieldCached, self).__get__(instance, owner)
-        
-        DBRef_list = instance._data.get(self.name)
-        if isinstance(DBRef_list, _queryset_list):
-            return DBRef_list
-        
-        if DBRef_list and len(DBRef_list) > 0:
-            keys = []
-            list_reference = True
-            for dbref_obj in DBRef_list:
-                if not isinstance(dbref_obj, DBRef):
-                    list_reference = False
-                    break                    
-                keys.append('%s:get:pk=%s' % (dbref_obj.collection , dbref_obj.id))
-            if list_reference:
+        timeout = _get_timeout(instance, 'list_reference')
+        if timeout:
+            changed = False
+            if instance.pk is None:
+                changed = True # this is new model
+            else:
+                try:    changed = self.name in instance._changed_fields # this model changed
+                except: pass
+    
+            if (not isinstance(timeout, int)) or changed:
+                return super(ListFieldCached, self).__get__(instance, owner)
+            
+            DBRef_list = instance._data.get(self.name)
+            if isinstance(DBRef_list, _queryset_list):
+                return DBRef_list
+            if DBRef_list and len(DBRef_list) > 0:
+                keys = []
+    
+                for dbref_obj in DBRef_list:
+                    if isinstance(dbref_obj, DBRef):
+                        keys.append('%s:get:%s' % (dbref_obj.collection,
+                                                   CacheNameMixer({ 'pk' : str(dbref_obj.id) }) ) )
+                    else:
+                        keys.append('%s:get:%s' % (self.document_type._get_collection_name(),
+                                                   CacheNameMixer({ 'pk' : str(dbref_obj)}) ) )
+    
                 models = cache.pipeline_get(keys)
                 del keys
                 if models is None or len(models) != len(DBRef_list) or changed:
                     models = super(ListFieldCached, self).__get__(instance, owner)
+    
                     if models and len(models) > 0:
                         instance._data[self.name] = _queryset_list()
                         for obj in models:
-                            if not isinstance(obj, DBRef):
-                                cache.set('%s:get:pk=%s' % (obj._get_collection_name(), obj.pk), obj, timeout)
-                            instance._data[self.name].append(obj)
+                            if isinstance(obj, Document):
+                                cache.set('%s:get:%s' % (obj._get_collection_name(),
+                                                         CacheNameMixer({ 'pk' : str(obj.pk) }) ),
+                                          obj, timeout)
+                                instance._data[self.name].append(obj)
                 return models
         return super(ListFieldCached, self).__get__(instance, owner)
 
     def __get__(self, instance, owner):
         if instance is None:
             return self
-        value = instance._data.get(self.name)
-        if isinstance(value, (DBRef)):
-            timeout = LazySettings.timelimit(instance.__class__.__name__ , 'reference')
-            if isinstance(timeout, int):
-                collection = value.collection
-                cache_key = '%s:get:pk=%s' % (collection , value.id)
-                obj = cache.get(cache_key)
-                if obj is None:
-                    obj = super(ReferenceFieldCached, self).__get__(instance, owner)
-                    cache.set(cache_key, obj, timeout)
-                if obj is not None:
+        timeout = _get_timeout(instance, 'reference')
+        if timeout:    
+            value = instance._data.get(self.name)
+            if not isinstance(value, Document): # for mongoengine dbref=False options
+                if isinstance(timeout, int):
+                    core = None
+                    if isinstance(value, DBRef):
+                        core = (value.collection,
+                                CacheNameMixer({ 'pk' : str(value.id) }) )
+                    else:
+                        core = (self.document_type._get_collection_name(),
+                                CacheNameMixer({ 'pk' : str(value) }) )
+
+                    cache_key = '%s:get:%s' % core
+                    obj = cache.get(cache_key)
+
+                    if obj is None:
+                        obj = super(ReferenceFieldCached, self).__get__(instance, owner)
+                        if obj:
+                            cache.set(cache_key, obj, timeout)
                     instance._data[self.name] = obj
-                return obj
+
         return super(ReferenceFieldCached, self).__get__(instance, owner)

File django_test_application/mongoengine_rediscache/helper.py

             super(_queryset_list, self).__init__()
         else:
             super(_queryset_list, self).__init__(anylist)
-    
+
     def count(self):
         return len(self)
+
+class SecondaryKey(object):
+    key = None
+    pk = None
+    def __init__(self, key, pk):
+        self.key = key
+        self.pk = pk

File django_test_application/mongoengine_rediscache/invalidation.py

 
 @author: unax
 '''
+SERVICE_TIME = 60
+from base_cache import _internal_cache as cache
+from misc import CacheNameMixer
 
-from journal import records
-from base_cache import _internal_cache as cache
-
-def model_change(pk, collection):
-    cache.pipeline_delete(records('list', collection))
-    cache.pipeline_delete(records('count', collection))
-    cache.pipeline_delete(records('get', collection, 'pk=%s' % str(pk)))
-    cache.delete("%s:get:journal:pk=%s" % (collection, str(pk)))
-    cache.delete("%s:list:journal:" % collection)
-    cache.delete("%s:count:journal:" % collection)
+def model_change(**params):
+    pk = params.get('pk')
+    collection = params.get('collection')
+    document = params.get('document')
+    if document:
+        pk = document.pk
+        collection = document._get_collection_name()
+    key = "%s:get:%s" % (collection, CacheNameMixer({ 'pk' : str(pk) }))
+    if document:
+        cache.set(key, document, SERVICE_TIME)
+    if params.get('delete'):
+        cache.delete(key)
+    cache.incr("version:%s" % collection, 1)
 
 class CacheInvalidator:
     @classmethod
     def post_save(cls, sender, document, **kwargs):
-        model_change(document.pk, document._get_collection_name())
-                
+        model_change(document=document)
+
     @classmethod
     def post_delete(cls, sender, document, **kwargs):
-        model_change(document.pk, document._get_collection_name())
+        model_change(pk=document.pk, collection=document._get_collection_name(), delete=True)

File django_test_application/mongoengine_rediscache/misc.py

 from mongoengine import Document
 from mongoengine.queryset import QuerySet
 from datetime import datetime
-import hashlib
+from hashlib import sha1
+from hashlib import md5
+from zlib import crc32
+from bson.dbref import DBRef
 from config import LazySettings
-from bson.dbref import DBRef
 # for old version: from pymongo.dbref import DBRef
 
+hash_func = {'md5'  : lambda st: md5(st).hexdigest(),
+             'sha1' : lambda st: sha1(st).hexdigest(),
+             'crc'  : lambda st: hex(crc32(st)) }
+
 from re import _pattern_type
 
 class CacheNameMixer(object):
     __line = None
+    __keyhashed = None
+
+    @property
+    def content(self):
+        return str(self)
 
     def __init__(self, query_dict=None):
+        self.__keyhashed = LazySettings().keyhashed
         self.__line = self.__parse(query_dict)
 
     def __str__(self):
-        if LazySettings().keyhashed:
-            return self.hash
-        return self.__line
+        return str(self.hash)
 
     def __unicode__(self):
-        return unicode(self.__line)
+        return unicode(self.hash)
 
     @property
     def hash(self):
-        md5 = hashlib.md5()
-        md5.update(self.__line)
-        return md5.hexdigest()
+        hash_method = hash_func.get(self.__keyhashed)
+        if hash_method:
+            return hash_method(self.__line)
+        return self.__line
 
     @property
     def line(self):

File django_test_application/mongoengine_rediscache/queryset.py

 @author: unax
 '''
 from mongoengine.queryset import QuerySet
+from mongoengine import Document
+from helper import (
+    _queryset_list,
+    SecondaryKey)
+from config import LazySettings, ABSOLUTE_VERSION_LIMIT
 from misc import CacheNameMixer
-from helper import _queryset_list
-from config import LazySettings
 from base_cache import _internal_cache as cache
-import journal
+from invalidation import model_change
 
 #================ for mongoengine ====================
 
 class CachedQuerySet(QuerySet):
+    cache_scheme_dict = None
+
+    @property
+    def cache_version(self):
+        version = cache.get_int("version:%s" % self._document._get_collection_name())
+        if not isinstance(version, int) or version > ABSOLUTE_VERSION_LIMIT:
+            version = 1
+            cache.set_int("version:%s" % self._document._get_collection_name(),
+                          version,
+                          max([ v for k, v in self.cache_scheme.iteritems() ]) + 1)
+        return version
+
+    @property
+    def cache_scheme(self):
+        if self.cache_scheme_dict is None:
+            self.cache_scheme_dict = dict()
+            d = LazySettings().scheme.get('%s.%s' % (self._document.__module__,
+                                                     self._document.__name__))
+            if d:
+                self.cache_scheme_dict.update(**d)
+        return self.cache_scheme_dict
+
     @property
     def core_cache_name(self):
         name = CacheNameMixer(self._query)
         if self._ordering:
             name.append(self._ordering)
         return name.line
- 
+
     def count(self):
-        timeout = LazySettings.timelimit(self._document.__name__, 'count')
+        timeout = self.cache_scheme.get('count')
         if isinstance(timeout, int):
-            cache_key = "%s:count:%s" % (self._document._get_collection_name(), self.core_cache_name)
-            n = cache.get(cache_key)
-            if n is None:
+            core = (self._document._get_collection_name(), self.core_cache_name)
+            cache_key = "%s:count:%s" % core
+            version = cache.get_int("version:%s:%s" % core)
+            if version:
+                v = self.cache_version
+                n = cache.get_int(cache_key)
+            else:
+                v = None
+                n = None
+
+            if not isinstance(n, int) or version != v:
                 if self._limit == 0:
-                    return 0
-                n = self._cursor.count(with_limit_and_skip=True)
-                cache.set(cache_key, n, timeout)
-                # add in journal
-                journal.add_count_record(cache_key, self._document._get_collection_name() , timeout)
+                    n = 0
+                else:
+                    n = self._cursor.count(with_limit_and_skip=True)
+                cache.set_int("version:%s:%s" % core, self.cache_version, timeout)
+                cache.set_int(cache_key, n, timeout)
             del cache_key
             return n
         return super(CachedQuerySet, self).count()
-    
+
     def get(self, *q_objs, **query):
-        timeout = LazySettings.timelimit(self._document.__name__, 'get')
+        timeout = self.cache_scheme.get('get')
         document = None
         if isinstance(timeout, int):
-            core_cache_name = str(CacheNameMixer(query))
-            cache_key = "%s:get:%s" % (self._document._get_collection_name() , core_cache_name)
+            core_cache_name = CacheNameMixer(query)
+            cache_key = "%s:get:%s" % (self._document._get_collection_name(),
+                                       core_cache_name)
             document = cache.get(cache_key)
-            if document is None:
+            if isinstance(document, SecondaryKey):
+                original_pk = document.pk
+                document = cache.get(document.key)
+                if not isinstance(document, Document):
+                    document = self.get(pk=original_pk)
+            elif document is None:
                 self.__call__(*q_objs, **query)
                 count = super(CachedQuerySet, self).count()
                 if count == 1:
                     raise self._document.MultipleObjectsReturned(u'%d items returned, instead of 1' % count)
                 elif count < 1:
                     raise self._document.DoesNotExist(u"%s matching query does not exist." % self._document._class_name)
-                cache.set(cache_key, document, timeout)
-                journal.add_get_record(document.pk, cache_key, self._document._get_collection_name(), timeout)
+                
+                original_cache_key = "%s:get:%s" % (self._document._get_collection_name(),
+                                                    CacheNameMixer({ 'pk' : str(document.pk) }))
+                if original_cache_key != cache_key:
+                    cache.set(cache_key, SecondaryKey(original_cache_key, str(document.pk)), timeout)
+
+                cache.set(original_cache_key, document, timeout)
         else:
             document = super(CachedQuerySet, self).get(*q_objs, **query)
         return document
 
     @property
     def cache(self):
-        timeout = LazySettings.timelimit(self._document.__name__, 'list')
+        timeout = self.cache_scheme.get('list')
         if isinstance(timeout, int):
-            cache_key = "%s:list:%s" % (self._document._get_collection_name(), self.core_cache_name)
-            cached_list = cache.get(cache_key)
-            if cached_list is None:
-            # creating cache
-                cached_list = _queryset_list()
-                if super(CachedQuerySet, self).count() > 0:
+            core = (self._document._get_collection_name(), self.core_cache_name)
+            cache_key = "%s:list:%s" % core
+            version = cache.get_int("version:%s:%s" % core)
+            if isinstance(version, int):
+                v = self.cache_version
+                cached_list = cache.get(cache_key)
+            else:
+                v = None
+                cached_list = None
+
+            if isinstance(cached_list, list) and version == v:
+                del cache_key
+                return _queryset_list(cache.pipeline_get(cached_list))
+            else:
+                # creating cache
+                if self.count() > 0:
+                    keys = list()
                     for obj in self:
-                        cached_list.append(obj)
-                    cache.set(cache_key, cached_list, timeout)
-                    # add in journal
-                    journal.add_find_record(cache_key, self._document._get_collection_name() , timeout)
-            del cache_key
-            return cached_list
+                        obj_cache_key = "%s:get:%s" % (self._document._get_collection_name(),
+                                                       CacheNameMixer({ 'pk' : str(obj.pk) }))
+                        keys.append(obj_cache_key)
+                        cache.set(obj_cache_key, obj, timeout)
+                    cache.set(cache_key, keys, timeout - 1)
+                    cache.set_int("version:%s:%s" % core, self.cache_version, timeout - 1)
+                del cache_key
         return self
+
+    def update_one(self, safe_update=True, upsert=False, write_options=None, **update):
+        res = QuerySet.update_one(self, safe_update=safe_update, upsert=upsert, write_options=write_options, **update)
+        model_change(collection=self._document._get_collection_name())
+        return res
+
+    def update(self, safe_update=True, upsert=False, multi=True, write_options=None, **update):
+        res = QuerySet.update(self, safe_update=safe_update, upsert=upsert, multi=multi, write_options=write_options, **update)
+        model_change(collection=self._document._get_collection_name())
+        return res
+
+    def delete(self, safe=False):
+        res = QuerySet.delete(self, safe=safe)
+        model_change(collection=self._document._get_collection_name())
+        return res

File mongoengine_rediscache/__init__.py

+VERSION = (2,0,1)
 
 def install_signals():
     from invalidation import CacheInvalidator

File mongoengine_rediscache/base_cache.py

         if data is None:
             return None
         return pickle.loads(data)
-    
+
     def pipeline_get(self, cache_key_list):
-        if isinstance(cache_key_list, list) and len(cache_key_list) > 0:
+        if cache_key_list:
             pipe = self.conn.pipeline()
             for key in cache_key_list:
                 pipe.get(key)
             data = pipe.execute()
-            if data is not None and len(data) > 0:
-                res = []
-                for d in data:
-                    try: obj = pickle.loads(d)
-                    except: obj = None
-                    if obj is not None:
-                        res.append(obj)
-                return res
+            if data:
+                return [ pickle.loads(d) for d in data if d ]
         return None
-    
+
     def pipeline_delete(self, cache_key_list):
         if isinstance(cache_key_list, list) and len(cache_key_list) > 0:
             pipe = self.conn.pipeline()
             for key in cache_key_list:
                 pipe.delete(key)
             data = pipe.execute()
-            if data is not None and len(data) > 0:
+            if data:
                 return data
         return None
-    
+
     def delete(self, cache_key):
         return self.conn.delete(cache_key)
 
             self.conn.setex(cache_key, pickled_data, timeout)
         else:
             self.conn.set(cache_key, pickled_data)
-    
+            
+    def set_int(self, cache_key, data, timeout=DEFAULT_TIMEOUT):
+        if not isinstance(data, int):
+            return
+        self.conn.setex(cache_key, data, timeout)
+
+    def get_int(self, cache_key):
+        try:    return int(self.conn.get(cache_key))
+        except: return None
+
+    def incr(self, name, amount=1):
+        self.conn.incr(name, amount)
+
     def flushall(self):
         if self.conn is None:
             return False
         try:    self.conn.flushdb()
         except: return False
         return True
-    
+
     def append_to_list(self, list_cache_key, data):
         self.conn.rpush(list_cache_key, data)
-    
+
     def get_all_list(self, list_cache_key):
         return  self.conn.lrange(list_cache_key, 0, -1)
 
                 cls.set             = cls.__cahe.set
                 cls.flushall        = cls.__cahe.flushall
                 cls.get_all_list    = cls.__cahe.get_all_list
+                cls.incr            = cls.__cahe.incr
+                cls.set_int         = cls.__cahe.set_int
+                cls.get_int         = cls.__cahe.get_int 
 
         if cls.__this is None:
             cls.__this = super(LazyCache, cls).__new__(cls)
     def set(self, *args, **kwargs):
         LazyCache()
 
+    def set_int(self, *args, **kwargs):
+        LazyCache()
+
+    def get_int(self, *args, **kwargs):
+        LazyCache()
+
     def flushall(self, *args, **kwargs):
         LazyCache()
 
     def get_all_list(self, *args, **kwargs):
         LazyCache()
+    
+    def incr(self, *args, **kwargs):
+        LazyCache()
 
 _internal_cache = LazyCache()

File mongoengine_rediscache/config.py

 @author: unax
 '''
 
+ABSOLUTE_VERSION_LIMIT = 4294967294 # is possible theoretically
+
+posible_options = ('list',
+                   'reference',
+                   'get',
+                   'list_reference',
+                   'count')
+
 class ClassProperty(object):
     def __init__(self, getter, setter):
         self.getter = getter
     __this = None
     __settings = None
     __scheme = None
-    __simple_scheme = None
     __keyhashed = None
 
     def __new__(cls):
                 return False
 
         if conf:
+            scheme = conf.get('scheme')
             self.__class__.__settings  = conf
-            self.__class__.__scheme    = conf.get('scheme')
+            for model in scheme:
+                for_all = scheme[model].get('all')
+                if isinstance(for_all, int):
+                    for key in posible_options:
+                        scheme[model][key] = for_all
             self.__class__.__keyhashed = conf.get('keyhashed')
-            simple_scheme = {}
-            for model_location in conf.get('scheme'):
-                simple_scheme[model_location.split('.')[-1]] = conf['scheme'][model_location]
-            self.__class__.__simple_scheme = simple_scheme
+            if conf.get('used'):
+                self.__class__.__scheme = scheme
+            else:
+                self.__class__.__scheme = {}
             return True
         else:
             return False
     def keyhashed(self):
         return self.__keyhashed
 
-    @property
-    def simple_scheme(self):
-        return  self.__simple_scheme
-    
     @classmethod
     def timelimit(cls, model_name, operation):
         scheme = cls().simple_scheme.get(model_name)

File mongoengine_rediscache/fields.py

 
 @author: unax
 '''
-from mongoengine.fields import ReferenceField, ListField
+from mongoengine import (
+    ReferenceField,
+    ListField,
+    Document)
 from helper import _queryset_list
-from config import LazySettings
 from base_cache import _internal_cache as cache
 from bson.dbref import DBRef
-# for old version: from pymongo.dbref import DBRef
+from config import LazySettings
+from misc import CacheNameMixer
+
+def _get_timeout(instance, operaton):
+    scheme = LazySettings().scheme.get('%s.%s' % (instance.__module__, instance.__class__.__name__))
+    if scheme and operaton in scheme:
+        return int(scheme.get(operaton))
+    return None
 
 class ListFieldCached(ListField):
     def __get__(self, instance, owner):
         if instance is None:
             return self
-        timeout = LazySettings.timelimit(instance.__class__.__name__, 'list_reference')
-        changed = False
-        if instance.pk is None:
-            changed = True # this is new model
-        else:
-            try:    changed = self.name in instance._changed_fields # this model changed
-            except: pass
-        if (not isinstance(timeout, int)) or changed:
-            return super(ListFieldCached, self).__get__(instance, owner)
-        
-        DBRef_list = instance._data.get(self.name)
-        if isinstance(DBRef_list, _queryset_list):
-            return DBRef_list
-        
-        if DBRef_list and len(DBRef_list) > 0:
-            keys = []
-            list_reference = True
-            for dbref_obj in DBRef_list:
-                if not isinstance(dbref_obj, DBRef):
-                    list_reference = False
-                    break                    
-                keys.append('%s:get:pk=%s' % (dbref_obj.collection , dbref_obj.id))
-            if list_reference:
+        timeout = _get_timeout(instance, 'list_reference')
+        if timeout:
+            changed = False
+            if instance.pk is None:
+                changed = True # this is new model
+            else:
+                try:    changed = self.name in instance._changed_fields # this model changed
+                except: pass
+    
+            if (not isinstance(timeout, int)) or changed:
+                return super(ListFieldCached, self).__get__(instance, owner)
+            
+            DBRef_list = instance._data.get(self.name)
+            if isinstance(DBRef_list, _queryset_list):
+                return DBRef_list
+            if DBRef_list and len(DBRef_list) > 0:
+                keys = []
+    
+                for dbref_obj in DBRef_list:
+                    if isinstance(dbref_obj, DBRef):
+                        keys.append('%s:get:%s' % (dbref_obj.collection,
+                                                   CacheNameMixer({ 'pk' : str(dbref_obj.id) }) ) )
+                    else:
+                        keys.append('%s:get:%s' % (self.document_type._get_collection_name(),
+                                                   CacheNameMixer({ 'pk' : str(dbref_obj)}) ) )
+    
                 models = cache.pipeline_get(keys)
                 del keys
                 if models is None or len(models) != len(DBRef_list) or changed:
                     models = super(ListFieldCached, self).__get__(instance, owner)
+    
                     if models and len(models) > 0:
                         instance._data[self.name] = _queryset_list()
                         for obj in models:
-                            if not isinstance(obj, DBRef):
-                                cache.set('%s:get:pk=%s' % (obj._get_collection_name(), obj.pk), obj, timeout)
-                            instance._data[self.name].append(obj)
+                            if isinstance(obj, Document):
+                                cache.set('%s:get:%s' % (obj._get_collection_name(),
+                                                         CacheNameMixer({ 'pk' : str(obj.pk) }) ),
+                                          obj, timeout)
+                                instance._data[self.name].append(obj)
                 return models
         return super(ListFieldCached, self).__get__(instance, owner)
 
     def __get__(self, instance, owner):
         if instance is None:
             return self
-        value = instance._data.get(self.name)
-        if isinstance(value, (DBRef)):
-            timeout = LazySettings.timelimit(instance.__class__.__name__ , 'reference')
-            if isinstance(timeout, int):
-                collection = value.collection
-                cache_key = '%s:get:pk=%s' % (collection , value.id)
-                obj = cache.get(cache_key)
-                if obj is None:
-                    obj = super(ReferenceFieldCached, self).__get__(instance, owner)
-                    cache.set(cache_key, obj, timeout)
-                if obj is not None:
+        timeout = _get_timeout(instance, 'reference')
+        if timeout:    
+            value = instance._data.get(self.name)
+            if not isinstance(value, Document): # for mongoengine dbref=False options
+                if isinstance(timeout, int):
+                    core = None
+                    if isinstance(value, DBRef):
+                        core = (value.collection,
+                                CacheNameMixer({ 'pk' : str(value.id) }) )
+                    else:
+                        core = (self.document_type._get_collection_name(),
+                                CacheNameMixer({ 'pk' : str(value) }) )
+
+                    cache_key = '%s:get:%s' % core
+                    obj = cache.get(cache_key)
+
+                    if obj is None:
+                        obj = super(ReferenceFieldCached, self).__get__(instance, owner)
+                        if obj:
+                            cache.set(cache_key, obj, timeout)
                     instance._data[self.name] = obj
-                return obj
+
         return super(ReferenceFieldCached, self).__get__(instance, owner)

File mongoengine_rediscache/helper.py

             super(_queryset_list, self).__init__()
         else:
             super(_queryset_list, self).__init__(anylist)
-    
+
     def count(self):
         return len(self)
+
class SecondaryKey(object):
    """Indirection record stored under a non-pk cache key.

    Attributes:
        key: the canonical cache key ("<collection>:get:<hashed pk>") under
             which the document itself is cached.
        pk:  the document's primary key (as str), used to re-fetch the
             document if the canonical entry has expired.
    """
    key = None
    pk = None

    def __init__(self, key, pk):
        self.key = key
        self.pk = pk

    def __repr__(self):
        # Aids debugging of pickled cache contents.
        return 'SecondaryKey(key=%r, pk=%r)' % (self.key, self.pk)

File mongoengine_rediscache/invalidation.py

 
 @author: unax
 '''
+SERVICE_TIME = 60
+from base_cache import _internal_cache as cache
+from misc import CacheNameMixer
 
-from journal import records
-from base_cache import _internal_cache as cache
-
-def model_change(pk, collection):
-    cache.pipeline_delete(records('list', collection))
-    cache.pipeline_delete(records('count', collection))
-    cache.pipeline_delete(records('get', collection, 'pk=%s' % str(pk)))
-    cache.delete("%s:get:journal:pk=%s" % (collection, str(pk)))
-    cache.delete("%s:list:journal:" % collection)
-    cache.delete("%s:count:journal:" % collection)
def model_change(pk=None, collection=None, document=None, delete=False, **params):
    """Invalidate (or refresh) cache entries after a model change.

    Named keyword parameters replace the previous opaque ``**params`` bag;
    all known call sites pass keywords, so this is backward compatible.

    Args:
        pk: primary key of the changed document (ignored when *document* given).
        collection: mongo collection name (ignored when *document* given).
        document: the changed document; when given, its per-pk cache entry is
            re-primed for a short SERVICE_TIME window.
        delete: when True, the per-pk cache entry is removed instead.
    """
    if document:
        pk = document.pk
        collection = document._get_collection_name()
    key = "%s:get:%s" % (collection, CacheNameMixer({ 'pk' : str(pk) }))
    if document:
        # Refresh the per-object entry so readers see the new state at once.
        cache.set(key, document, SERVICE_TIME)
    if delete or params.get('delete'):
        cache.delete(key)
    # Bump the collection version: list/count caches invalidate lazily by
    # comparing their stored version tag against this counter.
    cache.incr("version:%s" % collection, 1)
 
 class CacheInvalidator:
     @classmethod
     def post_save(cls, sender, document, **kwargs):
-        model_change(document.pk, document._get_collection_name())
-                
+        model_change(document=document)
+
     @classmethod
     def post_delete(cls, sender, document, **kwargs):
-        model_change(document.pk, document._get_collection_name())
+        model_change(pk=document.pk, collection=document._get_collection_name(), delete=True)

File mongoengine_rediscache/misc.py

 from mongoengine import Document
 from mongoengine.queryset import QuerySet
 from datetime import datetime
-import hashlib
+from hashlib import sha1
+from hashlib import md5
+from zlib import crc32
+from bson.dbref import DBRef
 from config import LazySettings
-from bson.dbref import DBRef
 # for old version: from pymongo.dbref import DBRef
 
# Key-hashing schemes selectable via the ``keyhashed`` setting.
hash_func = {
    'md5':  lambda raw: md5(raw).hexdigest(),
    'sha1': lambda raw: sha1(raw).hexdigest(),
    'crc':  lambda raw: hex(crc32(raw)),
}
+
 from re import _pattern_type
 
 class CacheNameMixer(object):
     __line = None
+    __keyhashed = None
+
+    @property
+    def content(self):
+        return str(self)
 
     def __init__(self, query_dict=None):
+        self.__keyhashed = LazySettings().keyhashed
         self.__line = self.__parse(query_dict)
 
     def __str__(self):
-        if LazySettings().keyhashed:
-            return self.hash
-        return self.__line
+        return str(self.hash)
 
     def __unicode__(self):
-        return unicode(self.__line)
+        return unicode(self.hash)
 
     @property
     def hash(self):
-        md5 = hashlib.md5()
-        md5.update(self.__line)
-        return md5.hexdigest()
+        hash_method = hash_func.get(self.__keyhashed)
+        if hash_method:
+            return hash_method(self.__line)
+        return self.__line
 
     @property
     def line(self):

File mongoengine_rediscache/queryset.py

 @author: unax
 '''
 from mongoengine.queryset import QuerySet
+from mongoengine import Document
+from helper import (
+    _queryset_list,
+    SecondaryKey)
+from config import LazySettings, ABSOLUTE_VERSION_LIMIT
 from misc import CacheNameMixer
-from helper import _queryset_list
-from config import LazySettings
 from base_cache import _internal_cache as cache
-import journal
+from invalidation import model_change
 
 #================ for mongoengine ====================
 
 class CachedQuerySet(QuerySet):
+    cache_scheme_dict = None
+
+    @property
+    def cache_version(self):
+        version = cache.get_int("version:%s" % self._document._get_collection_name())
+        if not isinstance(version, int) or version > ABSOLUTE_VERSION_LIMIT:
+            version = 1
+            cache.set_int("version:%s" % self._document._get_collection_name(),
+                          version,
+                          max([ v for k, v in self.cache_scheme.iteritems() ]) + 1)
+        return version
+
+    @property
+    def cache_scheme(self):
+        if self.cache_scheme_dict is None:
+            self.cache_scheme_dict = dict()
+            d = LazySettings().scheme.get('%s.%s' % (self._document.__module__,
+                                                     self._document.__name__))
+            if d:
+                self.cache_scheme_dict.update(**d)
+        return self.cache_scheme_dict
+
     @property
     def core_cache_name(self):
         name = CacheNameMixer(self._query)
         if self._ordering:
             name.append(self._ordering)
         return name.line
- 
+
     def count(self):
-        timeout = LazySettings.timelimit(self._document.__name__, 'count')
+        timeout = self.cache_scheme.get('count')
         if isinstance(timeout, int):
-            cache_key = "%s:count:%s" % (self._document._get_collection_name(), self.core_cache_name)
-            n = cache.get(cache_key)
-            if n is None:
+            core = (self._document._get_collection_name(), self.core_cache_name)
+            cache_key = "%s:count:%s" % core
+            version = cache.get_int("version:%s:%s" % core)
+            if version:
+                v = self.cache_version
+                n = cache.get_int(cache_key)
+            else:
+                v = None
+                n = None
+
+            if not isinstance(n, int) or version != v:
                 if self._limit == 0:
-                    return 0
-                n = self._cursor.count(with_limit_and_skip=True)
-                cache.set(cache_key, n, timeout)
-                # add in journal
-                journal.add_count_record(cache_key, self._document._get_collection_name() , timeout)
+                    n = 0
+                else:
+                    n = self._cursor.count(with_limit_and_skip=True)
+                cache.set_int("version:%s:%s" % core, self.cache_version, timeout)
+                cache.set_int(cache_key, n, timeout)
             del cache_key
             return n
         return super(CachedQuerySet, self).count()
-    
+
     def get(self, *q_objs, **query):
-        timeout = LazySettings.timelimit(self._document.__name__, 'get')
+        timeout = self.cache_scheme.get('get')
         document = None
         if isinstance(timeout, int):
-            core_cache_name = str(CacheNameMixer(query))
-            cache_key = "%s:get:%s" % (self._document._get_collection_name() , core_cache_name)
+            core_cache_name = CacheNameMixer(query)
+            cache_key = "%s:get:%s" % (self._document._get_collection_name(),
+                                       core_cache_name)
             document = cache.get(cache_key)
-            if document is None:
+            if isinstance(document, SecondaryKey):
+                original_pk = document.pk
+                document = cache.get(document.key)
+                if not isinstance(document, Document):
+                    document = self.get(pk=original_pk)
+            elif document is None:
                 self.__call__(*q_objs, **query)
                 count = super(CachedQuerySet, self).count()
                 if count == 1:
                     raise self._document.MultipleObjectsReturned(u'%d items returned, instead of 1' % count)
                 elif count < 1:
                     raise self._document.DoesNotExist(u"%s matching query does not exist." % self._document._class_name)
-                cache.set(cache_key, document, timeout)
-                journal.add_get_record(document.pk, cache_key, self._document._get_collection_name(), timeout)
+                
+                original_cache_key = "%s:get:%s" % (self._document._get_collection_name(),
+                                                    CacheNameMixer({ 'pk' : str(document.pk) }))
+                if original_cache_key != cache_key:
+                    cache.set(cache_key, SecondaryKey(original_cache_key, str(document.pk)), timeout)
+
+                cache.set(original_cache_key, document, timeout)
         else:
             document = super(CachedQuerySet, self).get(*q_objs, **query)
         return document
 
     @property
     def cache(self):
-        timeout = LazySettings.timelimit(self._document.__name__, 'list')
+        timeout = self.cache_scheme.get('list')
         if isinstance(timeout, int):
-            cache_key = "%s:list:%s" % (self._document._get_collection_name(), self.core_cache_name)
-            cached_list = cache.get(cache_key)
-            if cached_list is None:
-            # creating cache
-                cached_list = _queryset_list()
-                if super(CachedQuerySet, self).count() > 0:
+            core = (self._document._get_collection_name(), self.core_cache_name)
+            cache_key = "%s:list:%s" % core
+            version = cache.get_int("version:%s:%s" % core)
+            if isinstance(version, int):
+                v = self.cache_version
+                cached_list = cache.get(cache_key)
+            else:
+                v = None
+                cached_list = None
+
+            if isinstance(cached_list, list) and version == v:
+                del cache_key
+                return _queryset_list(cache.pipeline_get(cached_list))
+            else:
+                # creating cache
+                if self.count() > 0:
+                    keys = list()
                     for obj in self:
-                        cached_list.append(obj)
-                    cache.set(cache_key, cached_list, timeout)
-                    # add in journal
-                    journal.add_find_record(cache_key, self._document._get_collection_name() , timeout)
-            del cache_key
-            return cached_list
+                        obj_cache_key = "%s:get:%s" % (self._document._get_collection_name(),
+                                                       CacheNameMixer({ 'pk' : str(obj.pk) }))
+                        keys.append(obj_cache_key)
+                        cache.set(obj_cache_key, obj, timeout)
+                    cache.set(cache_key, keys, timeout - 1)
+                    cache.set_int("version:%s:%s" % core, self.cache_version, timeout - 1)
+                del cache_key
         return self
+
+    def update_one(self, safe_update=True, upsert=False, write_options=None, **update):
+        res = QuerySet.update_one(self, safe_update=safe_update, upsert=upsert, write_options=write_options, **update)
+        model_change(collection=self._document._get_collection_name())
+        return res
+
+    def update(self, safe_update=True, upsert=False, multi=True, write_options=None, **update):
+        res = QuerySet.update(self, safe_update=safe_update, upsert=upsert, multi=multi, write_options=write_options, **update)
+        model_change(collection=self._document._get_collection_name())
+        return res
+
+    def delete(self, safe=False):
+        res = QuerySet.delete(self, safe=safe)
+        model_change(collection=self._document._get_collection_name())
+        return res