Anonymous committed b0fa16e

initial version

Files changed (78)

.hgignore

+syntax: glob
+.project
+.pydevproject
+.settings
+.idea
+.DS_Store
+*~
+*.orig
+*.pyc
+*.pyo
+*.swp
+*.tmp
+_generated_media*
+desktop.ini
+settings_overrides.py
+nbproject
+django
+django_mongodb_engine
+djangotoolbox
+mediagenerator
+temp.*
+static\Tests
+Thumbs.db

LICENSE

+Copyright (c) Waldemar Kornewald, Thomas Wanschik, and all contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+    1. Redistributions of source code must retain the above copyright notice, 
+       this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright 
+       notice, this list of conditions and the following disclaimer in the
+       documentation and/or other materials provided with the distribution.
+
+    3. Neither the name of All Buttons Pressed nor
+       the names of its contributors may be used to endorse or promote products
+       derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Empty file added.

app.yaml

+application: haditbefore-dev
+version: 1
+runtime: python
+api_version: 1
+
+builtins:
+- remote_api: on
+
+inbound_services:
+- warmup
+- xmpp_message
+- xmpp_presence
+
+handlers:
+- url: /_ah/queue/deferred
+  script: djangoappengine/deferred/handler.py
+  login: admin
+
+- url: /_ah/stats/.*
+  script: djangoappengine/appstats/ui.py
+
+- url: /media/admin
+  static_dir: django/contrib/admin/media
+  expiration: '0'
+
+- url: /.*
+  script: djangoappengine/main/main.py

autoload/__init__.py

+def autodiscover(module_name):
+    """
+    Automatically loads the module named module_name from each app listed
+    in INSTALLED_APPS.
+    """
+    from django.conf import settings
+    from django.utils.importlib import import_module
+    from django.utils.module_loading import module_has_submodule
+
+    for app in settings.INSTALLED_APPS:
+        mod = import_module(app)
+        # Attempt to import the app's module.
+        try:
+            import_module('%s.%s' % (app, module_name))
+        except:
+            # Decide whether to bubble up this error. If the app simply
+            # doesn't have the module, ignore the failed import; otherwise
+            # we want the error to bubble up.
+            if module_has_submodule(mod, module_name):
+                raise
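
For illustration, a minimal usage sketch (the module name is hypothetical; dbindexer below calls this helper with 'dbindexes'):

    from autoload import autodiscover
    # Import the 'dbindexes' submodule of every app in INSTALLED_APPS,
    # silently skipping apps that don't define one.
    autodiscover('dbindexes')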

autoload/middleware.py

+from django.utils.importlib import import_module
+from django.conf import settings
+
+# Load every app's models.py so that signal handlers get installed and
+# indexes get loaded for apps that rely on those import side effects.
+for app in settings.INSTALLED_APPS:
+    try:
+        import_module('%s.models' % (app))
+    except ImportError:
+        pass
+
+class AutoloadMiddleware(object):
+    """Empty because the import above already does everything for us"""
+    pass

autoload/models.py

+# Load the siteconf module
+from django.conf import settings
+from django.utils.importlib import import_module
+SITECONF_MODULE = getattr(settings, 'AUTOLOAD_SITECONF', settings.ROOT_URLCONF)
+import_module(SITECONF_MODULE)
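
A minimal settings sketch for the override this honors (the module name 'indexes' is hypothetical):

    # settings.py
    AUTOLOAD_SITECONF = 'indexes'   # defaults to ROOT_URLCONF when unset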

cron.yaml

+cron:
+- description: keep alive
+  url: /
+  schedule: every 2 minutes

dbindexer/__init__.py

+def autodiscover():
+    from autoload import autodiscover as auto_discover
+    auto_discover('dbindexes')
+    
+def load_indexes():
+    from django.conf import settings
+    from django.utils.importlib import import_module
+
+    for name in getattr(settings, 'DB_INDEX_MODULES', ()):
+        import_module(name)
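
A hedged settings sketch for the optional index modules loaded above (the module path is hypothetical):

    # settings.py
    DB_INDEX_MODULES = (
        'myproject.search_indexes',   # hypothetical module that calls register_index()
    )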

dbindexer/api.py

+from .lookups import LookupDoesNotExist, ExtraFieldLookup
+from . import lookups as lookups_module
+from .resolver import resolver
+import inspect
+
+# TODO: add possibility to add lookup modules
+def create_lookup(lookup_def):
+    for _, cls in inspect.getmembers(lookups_module):
+        if inspect.isclass(cls) and issubclass(cls, ExtraFieldLookup) and \
+                cls.matches_lookup_def(lookup_def):
+            return cls()
+    raise LookupDoesNotExist('No lookup found for %s.' % lookup_def)
+
+def register_index(model, mapping):
+    for field_name, lookups in mapping.items():
+        if not isinstance(lookups, (list, tuple)):
+            lookups = (lookups, )
+            
+        # create indexes and add model and field_name to lookups
+        # create ExtraFieldLookup instances on the fly if needed
+        for lookup in lookups:
+            lookup_def = None
+            if not isinstance(lookup, ExtraFieldLookup):
+                lookup_def = lookup
+                lookup = create_lookup(lookup_def)
+            lookup.contribute(model, field_name, lookup_def)
+            resolver.create_index(lookup)
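
A minimal usage sketch, assuming a hypothetical app with a Post model and a dbindexes.py module picked up by autodiscover():

    # myapp/dbindexes.py (hypothetical)
    from dbindexer.api import register_index
    from myapp.models import Post

    register_index(Post, {
        'title': ('iexact', 'icontains'),   # adds hidden index fields for both lookups
        'created': 'month',                 # date-part lookup on a DateTimeField
    })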

dbindexer/backends.py

+from django.db import models
+from django.db.models.fields import FieldDoesNotExist
+from django.db.models.sql.constants import JOIN_TYPE, LHS_ALIAS, LHS_JOIN_COL, \
+    TABLE_NAME, RHS_JOIN_COL
+from django.utils.tree import Node
+from djangotoolbox.fields import ListField
+from .lookups import StandardLookup
+
+OR = 'OR'
+
+# TODO: optimize code
+class BaseResolver(object):
+    def __init__(self):
+        # mapping from lookups to indexes
+        self.index_map = {}
+        # mapping from column names to field names
+        self.column_to_name = {}
+        
+    # API called by the resolver
+    
+    def create_index(self, lookup):
+        field_to_index = self.get_field_to_index(lookup.model, lookup.field_name)
+        
+        # the backend doesn't know how to handle this index definition
+        if not field_to_index:
+            return 
+        
+        index_field = lookup.get_field_to_add(field_to_index)        
+        config_field = index_field.item_field if \
+            isinstance(index_field, ListField) else index_field  
+        if hasattr(field_to_index, 'max_length') and \
+                isinstance(config_field, models.CharField):
+            config_field.max_length = field_to_index.max_length
+            
+        # don't install a field if it already exists
+        try:
+            lookup.model._meta.get_field(self.index_name(lookup))
+        except FieldDoesNotExist:
+            lookup.model.add_to_class(self.index_name(lookup), index_field)
+            self.index_map[lookup] = index_field
+            self.add_column_to_name(lookup.model, lookup.field_name)
+        else:
+            # makes dbindexer unit test compatible
+            if lookup not in self.index_map:
+                self.index_map[lookup] = lookup.model._meta.get_field(
+                    self.index_name(lookup))
+                self.add_column_to_name(lookup.model, lookup.field_name)
+                
+    def convert_insert_query(self, query):
+        '''Converts a database insert (save) query.'''
+        
+        for lookup in self.index_map.keys():
+            self._convert_insert_query(query, lookup)
+    
+    def _convert_insert_query(self, query, lookup):
+        if not lookup.model == query.model:
+            return
+                    
+        position = self.get_query_position(query, lookup)
+        if position is None:
+            return
+        
+        value = self.get_value(lookup.model, lookup.field_name, query)
+        value = lookup.convert_value(value)
+        query.values[position] = (self.get_index(lookup), value)
+            
+    def convert_filters(self, query):
+        self._convert_filters(query, query.where)
+
+    # helper methods
+    
+    def _convert_filters(self, query, filters):
+        for index, child in enumerate(filters.children[:]):
+            if isinstance(child, Node):
+                self._convert_filters(query, child)
+                continue
+
+            self.convert_filter(query, filters, child, index)
+
+    def convert_filter(self, query, filters, child, index):
+        constraint, lookup_type, annotation, value = child
+        
+        if constraint.field is None:
+            return
+        
+        field_name = self.column_to_name.get(constraint.field.column)
+        if field_name and constraint.alias == \
+                query.table_map[query.model._meta.db_table][0]:
+            for lookup in self.index_map.keys():
+                if lookup.matches_filter(query.model, field_name, lookup_type,
+                                         value):
+                    new_lookup_type, new_value = lookup.convert_lookup(value,
+                                                                       lookup_type)
+                    index_name = self.index_name(lookup)
+                    self._convert_filter(query, filters, child, index,
+                                         new_lookup_type, new_value, index_name)
+        
+    def _convert_filter(self, query, filters, child, index, new_lookup_type,
+                        new_value, index_name):
+        constraint, lookup_type, annotation, value = child
+        lookup_type, value = new_lookup_type, new_value
+        constraint.field = query.get_meta().get_field(index_name)
+        constraint.col = constraint.field.column
+        child = constraint, lookup_type, annotation, value
+        filters.children[index] = child
+    
+    def index_name(self, lookup):
+        return lookup.index_name
+    
+    def get_field_to_index(self, model, field_name):
+        try:
+            return model._meta.get_field(field_name)
+        except FieldDoesNotExist:
+            return None
+    
+    def get_value(self, model, field_name, query):
+        field_to_index = self.get_field_to_index(model, field_name)
+        for query_field, value in query.values[:]:
+            if field_to_index == query_field:
+                return value
+        raise FieldDoesNotExist('Cannot find field in query.')
+    
+    def add_column_to_name(self, model, field_name):
+        column_name = model._meta.get_field(field_name).column
+        self.column_to_name[column_name] = field_name
+    
+    def get_index(self, lookup):
+        return self.index_map[lookup]
+    
+    def get_query_position(self, query, lookup):
+        for index, (field, query_value) in enumerate(query.values[:]):
+            if field is self.get_index(lookup):
+                return index
+        return None
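
As a sketch of the net effect (model, field, and value hypothetical): with an iexact index registered on a field called name, a filter such as

    MyModel.objects.filter(name__iexact='FooBar')

is rewritten by convert_filter() into an exact match on the hidden index field, roughly

    MyModel.objects.filter(idxf_name_l_iexact__exact='foobar')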
+
+def unref_alias(query, alias):
+    table_name = query.alias_map[alias][TABLE_NAME]
+    query.alias_refcount[alias] -= 1
+    if query.alias_refcount[alias] < 1:
+        # Remove all information about the join
+        del query.alias_refcount[alias]
+        del query.join_map[query.rev_join_map[alias]]
+        del query.rev_join_map[alias]
+        del query.alias_map[alias]
+        query.table_map[table_name].remove(alias)
+        if len(query.table_map[table_name]) == 0:
+            del query.table_map[table_name]
+        query.used_aliases.discard(alias)
+
+class FKNullFix(BaseResolver):
+    '''
+        Django doesn't generate correct code for ForeignKey__isnull.
+        It becomes a JOIN with pk__isnull which won't work on nonrel DBs,
+        so we rewrite the JOIN here.
+    '''
+     
+    def create_index(self, lookup):
+        pass
+    
+    def convert_insert_query(self, query):
+        pass
+    
+    def convert_filter(self, query, filters, child, index):
+        constraint, lookup_type, annotation, value = child
+        if constraint.field is not None and lookup_type == 'isnull' and \
+                        isinstance(constraint.field, models.ForeignKey):
+            self.fix_fk_null_filter(query, constraint)
+            
+    def unref_alias(self, query, alias):
+        unref_alias(query, alias)
+            
+    def fix_fk_null_filter(self, query, constraint):
+        alias = constraint.alias
+        table_name = query.alias_map[alias][TABLE_NAME]
+        lhs_join_col = query.alias_map[alias][LHS_JOIN_COL]
+        rhs_join_col = query.alias_map[alias][RHS_JOIN_COL]
+        if table_name != constraint.field.rel.to._meta.db_table or \
+                rhs_join_col != constraint.field.rel.to._meta.pk.column or \
+                lhs_join_col != constraint.field.column:
+            return
+        next_alias = query.alias_map[alias][LHS_ALIAS]
+        if not next_alias:
+            return
+        self.unref_alias(query, alias)
+        alias = next_alias
+        constraint.col = constraint.field.column
+        constraint.alias = alias
+
+class ConstantFieldJOINResolver(BaseResolver):
+    def create_index(self, lookup):
+        if '__' in lookup.field_name:
+            super(ConstantFieldJOINResolver, self).create_index(lookup)
+    
+    def convert_insert_query(self, query):
+        '''Converts a database insert (save) query.'''
+        
+        for lookup in self.index_map.keys():
+            if '__' in lookup.field_name:
+                self._convert_insert_query(query, lookup)
+    
+    def convert_filter(self, query, filters, child, index):
+        constraint, lookup_type, annotation, value = child
+        field_chain = self.get_field_chain(query, constraint)
+        
+        if field_chain is None:
+            return
+        
+        for lookup in self.index_map.keys():
+            if lookup.matches_filter(query.model, field_chain, lookup_type,
+                                     value):
+                self.resolve_join(query, child)
+                new_lookup_type, new_value = lookup.convert_lookup(value,
+                                                                   lookup_type)
+                index_name = self.index_name(lookup)
+                self._convert_filter(query, filters, child, index,
+                                     new_lookup_type, new_value, index_name)
+    
+    def get_field_to_index(self, model, field_name):
+        model = self.get_model_chain(model, field_name)[-1]
+        field_name = field_name.split('__')[-1]
+        return super(ConstantFieldJOINResolver, self).get_field_to_index(model,
+            field_name)
+    
+    def get_value(self, model, field_name, query):
+        value = super(ConstantFieldJOINResolver, self).get_value(model,
+                                    field_name.split('__')[0],
+                                    query)
+        if value is not None:
+            value = self.get_target_value(model, field_name, value)
+        return value        
+
+    def get_field_chain(self, query, constraint):
+        if constraint.field is None:
+            return
+
+        column_index = self.get_column_index(query, constraint)
+        return self.column_to_name.get(column_index)
+
+    def get_model_chain(self, model, field_chain):
+        model_chain = [model, ]
+        for value in field_chain.split('__')[:-1]:
+            model = model._meta.get_field(value).rel.to
+            model_chain.append(model)
+        return model_chain
+       
+    def get_target_value(self, start_model, field_chain, pk):
+        fields = field_chain.split('__')
+        foreign_key = start_model._meta.get_field(fields[0])
+        
+        if not foreign_key.rel:
+            # field isn't a related one, so return the value itself
+            return pk
+        
+        target_model = foreign_key.rel.to
+        foreignkey = target_model.objects.all().get(pk=pk)
+        for value in fields[1:-1]:
+            foreignkey = getattr(foreignkey, value)
+        
+        if isinstance(foreignkey._meta.get_field(fields[-1]), models.ForeignKey):
+            return getattr(foreignkey, '%s_id' % fields[-1])
+        else:
+            return getattr(foreignkey, fields[-1])
+    
+    def add_column_to_name(self, model, field_name):
+        model_chain = self.get_model_chain(model, field_name)
+        column_chain = ''
+        field_names = field_name.split('__')
+        for model, name in zip(model_chain, field_names):
+            column_chain += model._meta.get_field(name).column + '__'
+        self.column_to_name[column_chain[:-2]] = field_name
+        
+    def unref_alias(self, query, alias):
+        unref_alias(query, alias)
+        
+    def get_column_index(self, query, constraint):
+        if constraint.field:
+            column_chain = constraint.field.column
+            alias = constraint.alias
+            while alias:
+                join = query.alias_map.get(alias)
+                if join and join[JOIN_TYPE] == 'INNER JOIN':
+                    column_chain += '__' + join[LHS_JOIN_COL]
+                    alias = query.alias_map[alias][LHS_ALIAS]
+                else:
+                    alias = None
+        return '__'.join(reversed(column_chain.split('__')))
+
+    def resolve_join(self, query, child):
+        constraint, lookup_type, annotation, value = child
+        if not constraint.field:
+            return
+
+        alias = constraint.alias
+        while True:
+            next_alias = query.alias_map[alias][LHS_ALIAS]
+            if not next_alias:
+                break
+            self.unref_alias(query, alias)
+            alias = next_alias
+        
+        constraint.alias = alias
+
+# TODO: distinguish in memory joins from standard joins somehow
+class InMemoryJOINResolver(ConstantFieldJOINResolver):
+    def __init__(self):
+        self.field_chains = []
+        super(InMemoryJOINResolver, self).__init__()
+
+    def create_index(self, lookup):
+        if '__' in lookup.field_name:
+            field_to_index = self.get_field_to_index(lookup.model, lookup.field_name)
+        
+            if not field_to_index:
+                return 
+            
+            # record the column mapping so we can run in-memory queries later on
+            self.add_column_to_name(lookup.model, lookup.field_name)
+            
+            # don't add an extra field for standard lookups!
+            if isinstance(lookup, StandardLookup):
+                return 
+             
+            # install lookup on target model
+            model = self.get_model_chain(lookup.model, lookup.field_name)[-1]
+            lookup.model = model
+            lookup.field_name = lookup.field_name.split('__')[-1]
+            super(ConstantFieldJOINResolver, self).create_index(lookup)
+    
+    def convert_insert_query(self, query):
+        super(ConstantFieldJOINResolver, self).convert_insert_query(query)
+        
+    def _convert_filters(self, query, filters):
+        # OR queries are not supported for in-memory JOINs
+        if self.contains_OR(query.where, OR):
+            return
+        
+        # start with the deepest JOIN level filter!
+        all_filters = self.get_all_filters(filters)
+        all_filters.sort(key=lambda item: self.get_field_chain(query, item[1][0]) and \
+                         -len(self.get_field_chain(query, item[1][0])) or 0)
+        
+        for filters, child, index in all_filters:
+            # check if convert_filter removed a given child from the where-tree
+            if not self.contains_child(query.where, child):
+                continue
+            self.convert_filter(query, filters, child, index)
+    
+    def convert_filter(self, query, filters, child, index):
+        constraint, lookup_type, annotation, value = child
+        field_chain = self.get_field_chain(query, constraint)
+
+        if field_chain is None:
+            return
+        
+        if '__' not in field_chain:
+            return super(ConstantFieldJOINResolver, self).convert_filter(query,
+                filters, child, index)
+        
+        pks = self.get_pks(query, field_chain, lookup_type, value)
+        self.resolve_join(query, child)
+        self._convert_filter(query, filters, child, index, 'in',
+                             (pk for pk in pks), field_chain.split('__')[0])
+        
+    def tree_contains(self, filters, to_find, func):
+        result = False
+        for child in filters.children[:]:
+            if func(child, to_find):
+                result = True
+                break
+            if isinstance(child, Node):
+                result = self.tree_contains(child, to_find, func)
+                if result:
+                    break
+        return result
+    
+    def contains_OR(self, filters, or_):
+        return self.tree_contains(filters, or_,
+            lambda c, f: isinstance(c, Node) and c.connector == f)
+
+    def contains_child(self, filters, to_find):
+        return self.tree_contains(filters, to_find, lambda c, f: c is f)
+    
+    def get_all_filters(self, filters):
+        all_filters = []
+        for index, child in enumerate(filters.children[:]):
+            if isinstance(child, Node):
+                all_filters.extend(self.get_all_filters(child))
+                continue
+
+            all_filters.append((filters, child, index))
+        return all_filters
+    
+    def index_name(self, lookup):
+        # use another index_name to avoid conflicts with lookups defined on
+        # the target model, which are handled by the BaseResolver
+        return lookup.index_name + '_in_memory_join'
+    
+    def get_pks(self, query, field_chain, lookup_type, value):
+        model_chain = self.get_model_chain(query.model, field_chain)
+                
+        first_lookup = {'%s__%s' %(field_chain.rsplit('__', 1)[-1],
+                                   lookup_type): value}
+        self.combine_with_same_level_filter(first_lookup, query, field_chain)
+        pks = model_chain[-1].objects.all().filter(**first_lookup).values_list(
+            'id', flat=True)
+
+        chains = [field_chain.rsplit('__', i+1)[0]
+                  for i in range(field_chain.count('__'))]
+        lookup = {}
+        for model, chain in reversed(zip(model_chain[1:-1], chains[:-1])):
+            lookup.update({'%s__%s' %(chain.rsplit('__', 1)[-1], 'in'):
+                           (pk for pk in pks)})
+            self.combine_with_same_level_filter(lookup, query, chain)
+            pks = model.objects.all().filter(**lookup).values_list('id', flat=True)
+        return pks
+    
+    def combine_with_same_level_filter(self, lookup, query, field_chain):
+        lookup_updates = {}
+        field_chains = self.get_all_field_chains(query, query.where)
+
+        for chain, child in field_chains.items():
+            if chain == field_chain:
+                continue
+            if field_chain.rsplit('__', 1)[0] == chain.rsplit('__', 1)[0]:
+                lookup_updates['%s__%s' % (chain.rsplit('__', 1)[1],
+                                           child[1])] = child[3]
+                
+                self.remove_child(query.where, child)
+                self.resolve_join(query, child)
+                # TODO: update query.alias_refcount correctly!
+        lookup.update(lookup_updates)
+                
+    def remove_child(self, filters, to_remove):
+        ''' Removes a child object from filters. If filters doesn't contain
+            any children afterwards, filters will be removed from its parent. '''
+            
+        for child in filters.children[:]:
+            if child is to_remove:
+                self._remove_child(filters, to_remove)
+                return
+            elif isinstance(child, Node):
+                self.remove_child(child, to_remove)
+            
+            if hasattr(child, 'children') and not child.children:
+                self.remove_child(filters, child)
+    
+    def _remove_child(self, filters, to_remove):
+        result = []
+        for child in filters.children[:]:
+            if child is to_remove:
+                continue
+            result.append(child)
+        filters.children = result
+    
+    def get_all_field_chains(self, query, filters):
+        ''' Returns a dict mapping from field_chains to the corresponding child.'''
+
+        field_chains = {}
+        all_filters = self.get_all_filters(filters)
+        for filters, child, index in all_filters:
+            field_chain = self.get_field_chain(query, child[0])
+            # field_chain can be None if the user didn't specify an index for it
+            if field_chain:
+                field_chains[field_chain] = child
+        return field_chains

dbindexer/base.py

+from django.conf import settings
+from django.utils.importlib import import_module
+
+class DatabaseOperations(object):
+    dbindexer_compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'
+
+    def __init__(self):
+        self._dbindexer_cache = {}
+
+    def compiler(self, compiler_name):
+        if compiler_name not in self._dbindexer_cache:
+            target = super(DatabaseOperations, self).compiler(compiler_name)
+            base = getattr(
+                import_module(self.dbindexer_compiler_module), compiler_name)
+            class Compiler(base, target):
+                pass
+            self._dbindexer_cache[compiler_name] = Compiler
+        return self._dbindexer_cache[compiler_name]
+
+class BaseDatabaseWrapper(object):
+    def __init__(self, *args, **kwargs):
+        super(BaseDatabaseWrapper, self).__init__(*args, **kwargs)
+        class Operations(DatabaseOperations, self.ops.__class__):
+            pass
+        self.ops.__class__ = Operations
+        self.ops.__init__()
+
+def DatabaseWrapper(settings_dict, *args, **kwargs):
+    target_settings = settings.DATABASES[settings_dict['TARGET']]
+    engine = target_settings['ENGINE'] + '.base'
+    target = import_module(engine).DatabaseWrapper
+    class Wrapper(BaseDatabaseWrapper, target):
+        pass
+    return Wrapper(settings_dict, *args, **kwargs)
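
A hedged settings sketch for wiring the wrapper up (database alias names hypothetical); DatabaseWrapper resolves the real backend from the TARGET entry:

    # settings.py
    DATABASES = {
        'default': {
            'ENGINE': 'dbindexer',          # this module's parent package
            'TARGET': 'datastore',
        },
        'datastore': {
            'ENGINE': 'djangoappengine.db',
        },
    }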

dbindexer/compiler.py

+from .resolver import resolver
+from django.utils.importlib import import_module
+
+def __repr__(self):
+    return '<%s, %s, %s, %s>' % (self.alias, self.col, self.field.name,
+        self.field.model.__name__)
+
+from django.db.models.sql.where import Constraint
+Constraint.__repr__ = __repr__
+
+# TODO: manipulate a copy of the query instead of the query itself. This has
+# to be done because the query can be reused by the user afterwards, in which
+# case the manipulated query can lead to strange behavior!
+# TODO: add a watching layer which suggests indexes via query inspection
+# at runtime
+
+class BaseCompiler(object):
+    def convert_filters(self):
+        resolver.convert_filters(self.query)
+
+class SQLCompiler(BaseCompiler):
+    def execute_sql(self, *args, **kwargs):
+        self.convert_filters()
+        return super(SQLCompiler, self).execute_sql(*args, **kwargs)
+
+    def results_iter(self):
+        self.convert_filters()
+        return super(SQLCompiler, self).results_iter()
+
+
+class SQLInsertCompiler(BaseCompiler):
+    def execute_sql(self, return_id=False):
+        resolver.convert_insert_query(self.query)
+        return super(SQLInsertCompiler, self).execute_sql(return_id=return_id)
+
+class SQLUpdateCompiler(BaseCompiler):
+    pass
+
+class SQLDeleteCompiler(BaseCompiler):
+    pass

dbindexer/lookups.py

+from django.db import models
+from djangotoolbox.fields import ListField
+from copy import deepcopy 
+
+import re
+regex = type(re.compile(''))
+
+class LookupDoesNotExist(Exception):
+    pass
+
+class LookupBase(type):
+    def __new__(cls, name, bases, attrs):
+        new_cls = type.__new__(cls, name, bases, attrs)
+        if not isinstance(new_cls.lookup_types, (list, tuple)):
+            new_cls.lookup_types = (new_cls.lookup_types, )
+        return new_cls 
+
+class ExtraFieldLookup(object):
+    '''Default is to behave like an exact filter on an ExtraField.'''
+    __metaclass__ = LookupBase
+    lookup_types = 'exact'
+    
+    def __init__(self, model=None, field_name=None, lookup_def=None,
+                 new_lookup='exact', field_to_add=models.CharField(
+                 max_length=500, editable=False, null=True)):
+        self.field_to_add = field_to_add
+        self.new_lookup = new_lookup
+        self.contribute(model, field_name, lookup_def)
+        
+    def contribute(self, model, field_name, lookup_def):
+        self.model = model
+        self.field_name = field_name
+        self.lookup_def = lookup_def
+            
+    @property
+    def index_name(self):
+        return 'idxf_%s_l_%s' % (self.field_name, self.lookup_types[0])
+    
+    def convert_lookup(self, value, lookup_type):
+        # TODO: can value be a list or tuple? (yes, in the case of 'in')
+        if isinstance(value, (tuple, list)):
+            value = [self._convert_lookup(val, lookup_type)[1] for val in value]
+        else:
+            _, value = self._convert_lookup(value, lookup_type)
+        return self.new_lookup, value
+    
+    def _convert_lookup(self, value, lookup_type):
+        return lookup_type, value
+    
+    def convert_value(self, value):
+        if isinstance(value, (tuple, list)):
+            value = [self._convert_value(val) for val in value]
+        else:
+            value = self._convert_value(value)
+        return value
+    
+    def _convert_value(self, value):
+        return value
+        
+    def matches_filter(self, model, field_name, lookup_type, value):
+        return self.model == model and lookup_type in self.lookup_types \
+            and field_name == self.field_name
+    
+    @classmethod
+    def matches_lookup_def(cls, lookup_def):
+        if lookup_def in cls.lookup_types:
+            return True
+        return False
+    
+    def get_field_to_add(self, field_to_index):
+        field_to_add = deepcopy(self.field_to_add)
+        if isinstance(field_to_index, ListField):
+            field_to_add = ListField(field_to_add, editable=False, null=True)
+        return field_to_add
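
For instance, the hidden field created for the default exact lookup on a hypothetical field 'name' gets its name from index_name:

    >>> ExtraFieldLookup(field_name='name').index_name
    'idxf_name_l_exact'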
+
+class DateLookup(ExtraFieldLookup):
+    def __init__(self, *args, **kwargs):
+        defaults = {'new_lookup': 'exact',
+                    'field_to_add': models.IntegerField(editable=False, null=True)}
+        defaults.update(kwargs)
+        ExtraFieldLookup.__init__(self, *args, **defaults)
+    
+    def _convert_lookup(self, value, lookup_type):
+        return self.new_lookup, value
+
+class Day(DateLookup):
+    lookup_types = 'day'
+    
+    def _convert_value(self, value):
+        return value.day
+
+class Month(DateLookup):
+    lookup_types = 'month'
+    
+    def _convert_value(self, value):
+        return value.month
+
+class Year(DateLookup):
+    lookup_types = 'year'
+
+    def _convert_value(self, value):
+        return value.year
+
+class Weekday(DateLookup):
+    lookup_types = 'week_day'
+    
+    def _convert_value(self, value):
+        return value.isoweekday()
+
+class Contains(ExtraFieldLookup):
+    lookup_types = 'contains'
+
+    def __init__(self, *args, **kwargs):
+        defaults = {'new_lookup': 'startswith',
+                    'field_to_add': ListField(models.CharField(max_length=500),
+                                              editable=False, null=True)
+        }
+        defaults.update(kwargs)
+        ExtraFieldLookup.__init__(self, *args, **defaults)
+    
+    def get_field_to_add(self, field_to_index):
+        # always return a ListField of CharFields even in the case of
+        # field_to_index being a ListField itself!
+        return deepcopy(self.field_to_add)
+    
+    def convert_value(self, value):
+        new_value = []
+        if isinstance(value, (tuple, list)):
+            for val in value:
+                new_value.extend(self.contains_indexer(val))
+        else:
+            new_value = self.contains_indexer(value)
+        return new_value
+     
+    def _convert_lookup(self, value, lookup_type):
+        return self.new_lookup, value
+
+    def contains_indexer(self, value):
+        # In indexing mode we add all suffixes ('o', 'lo', ..., 'hello')
+        result = []
+        if value:
+            result.extend([value[count:] for count in range(len(value))])
+        return result
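
A quick sketch of the suffix expansion this produces; storing all suffixes lets a contains filter run as a startswith match against the list:

    >>> Contains().contains_indexer('hello')
    ['hello', 'ello', 'llo', 'lo', 'o']
    # name__contains='ell' is rewritten to a startswith='ell' match, hitting 'ello'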
+
+class Icontains(Contains):
+    lookup_types = 'icontains'
+    
+    def convert_value(self, value):
+        return [val.lower() for val in Contains.convert_value(self, value)]
+    
+    def _convert_lookup(self, value, lookup_type):
+        return self.new_lookup, value.lower()
+
+class Iexact(ExtraFieldLookup):
+    lookup_types = 'iexact'
+        
+    def _convert_lookup(self, value, lookup_type):
+        return self.new_lookup, value.lower()
+    
+    def _convert_value(self, value):
+        return value.lower()
+
+class Istartswith(ExtraFieldLookup):
+    lookup_types = 'istartswith'
+    
+    def __init__(self, *args, **kwargs):
+        defaults = {'new_lookup': 'startswith'}
+        defaults.update(kwargs)
+        ExtraFieldLookup.__init__(self, *args, **defaults)
+    
+    def _convert_lookup(self, value, lookup_type):
+        return self.new_lookup, value.lower()
+
+    def _convert_value(self, value):
+        return value.lower()
+
+class Endswith(ExtraFieldLookup):
+    lookup_types = 'endswith'
+    
+    def __init__(self, *args, **kwargs):
+        defaults = {'new_lookup': 'startswith'}
+        defaults.update(kwargs)
+        ExtraFieldLookup.__init__(self, *args, **defaults)
+    
+    def _convert_lookup(self, value, lookup_type):
+        return self.new_lookup, value[::-1]
+
+    def _convert_value(self, value):
+        return value[::-1]
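
The reversal trick in a nutshell: values are stored reversed, so an endswith filter becomes a startswith match on the reversed needle:

    >>> 'YondAimE'[::-1]
    'EmiAdnoY'
    # name__endswith='imE' is rewritten to a startswith match for 'Emi'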
+
+class Iendswith(Endswith):
+    lookup_types = 'iendswith'
+    
+    def _convert_lookup(self, value, lookup_type):
+        return self.new_lookup, value[::-1].lower()
+
+    def _convert_value(self, value):
+        return value[::-1].lower()
+
+class RegexLookup(ExtraFieldLookup):
+    lookup_types = ('regex', 'iregex')
+    
+    def __init__(self, *args, **kwargs):
+        defaults = {'field_to_add': models.NullBooleanField(editable=False,
+                                                            null=True) 
+        }
+        defaults.update(kwargs)
+        ExtraFieldLookup.__init__(self, *args, **defaults)        
+    
+    def contribute(self, model, field_name, lookup_def):
+        ExtraFieldLookup.contribute(self, model, field_name, lookup_def)
+        if isinstance(lookup_def, regex):
+            self.lookup_def = re.compile(lookup_def.pattern, re.S | re.U |
+                                         (lookup_def.flags & re.I))
+    
+    @property
+    def index_name(self):
+        return 'idxf_%s_l_%s' % (self.field_name,
+                                 self.lookup_def.pattern.encode('hex'))
+
+    def is_icase(self):
+        return self.lookup_def.flags & re.I
+    
+    def _convert_lookup(self, value, lookup_type):
+        return self.new_lookup, True
+
+    def _convert_value(self, value):
+        if self.lookup_def.match(value):
+            return True
+        return False
+        
+    def matches_filter(self, model, field_name, lookup_type, value):
+        return self.model == model and lookup_type == \
+                '%sregex' % ('i' if self.is_icase() else '') and \
+                value == self.lookup_def.pattern and field_name == self.field_name
+    
+    @classmethod
+    def matches_lookup_def(cls, lookup_def):
+        if isinstance(lookup_def, regex):
+            return True
+        return False 
+
+class StandardLookup(ExtraFieldLookup):
+    ''' Creates a copy of the field_to_index in order to allow querying for 
+        standard lookup_types on a JOINed property. '''
+    # TODO: database backend can specify standardLookups
+    lookup_types = ('exact', 'gt', 'gte', 'lt', 'lte', 'in', 'range', 'isnull')
+    
+    @property
+    def index_name(self):
+        return 'idxf_%s_l_%s' % (self.field_name, 'standard')
+    
+    def convert_lookup(self, value, lookup_type):
+        return lookup_type, value
+    
+    def get_field_to_add(self, field_to_index):
+        field_to_add = deepcopy(field_to_index)
+        if isinstance(field_to_add, (models.DateTimeField,
+                                    models.DateField, models.TimeField)):
+            field_to_add.auto_now_add = field_to_add.auto_now = False
+        return field_to_add

Empty file added.

dbindexer/resolver.py

+from django.conf import settings
+from django.utils.importlib import import_module
+from django.core.exceptions import ImproperlyConfigured
+
+class Resolver(object):
+    def __init__(self):
+        self.backends = []
+        self.load_backends(getattr(settings, 'DBINDEXER_BACKENDS',
+                               ('dbindexer.backends.BaseResolver',
+                                'dbindexer.backends.FKNullFix')))
+
+    def load_backends(self, backend_paths):
+        for backend in backend_paths:
+                self.backends.append(self.load_backend(backend))
+    
+    def load_backend(self, path):
+        module_name, attr_name = path.rsplit('.', 1)
+        try:
+            mod = import_module(module_name)
+        except (ImportError, ValueError), e:
+            raise ImproperlyConfigured('Error importing backend module %s: "%s"'
+                % (module_name, e))
+        try:
+            return getattr(mod, attr_name)()
+        except AttributeError:
+            raise ImproperlyConfigured('Module "%s" does not define a "%s" backend'
+                % (module_name, attr_name))
+
+    def convert_filters(self, query):
+        for backend in self.backends:
+            backend.convert_filters(query)
+
+    def create_index(self, lookup):
+        for backend in self.backends:
+            backend.create_index(lookup)
+
+    def convert_insert_query(self, query):
+        for backend in self.backends:
+            backend.convert_insert_query(query)
+
+resolver = Resolver()
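
A settings sketch showing how the default backend chain can be extended; this mirrors the combination exercised in dbindexer/tests.py:

    # settings.py
    DBINDEXER_BACKENDS = (
        'dbindexer.backends.BaseResolver',
        'dbindexer.backends.FKNullFix',
        'dbindexer.backends.ConstantFieldJOINResolver',
    )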

dbindexer/tests.py

+from django.db import models
+from django.test import TestCase
+from .api import register_index
+from .lookups import StandardLookup
+from .resolver import resolver 
+from djangotoolbox.fields import ListField
+from datetime import datetime
+import re
+
+class ForeignIndexed2(models.Model):
+    name_fi2 = models.CharField(max_length=500)
+    age = models.IntegerField()
+    
+class ForeignIndexed(models.Model):
+    title = models.CharField(max_length=500)
+    name_fi = models.CharField(max_length=500)
+    fk = models.ForeignKey(ForeignIndexed2, null=True)
+
+class Indexed(models.Model):
+    name = models.CharField(max_length=500)
+    published = models.DateTimeField(auto_now_add=True)
+    foreignkey = models.ForeignKey(ForeignIndexed, null=True)
+    foreignkey2 = models.ForeignKey(ForeignIndexed2, related_name='idx_set', null=True)
+    tags = ListField(models.CharField(max_length=500, null=True))
+
+# TODO: add test for foreign key with multiple filters via different and equal paths
+# to do so we have to create some entities matching equal paths but not matching
+# different paths
+class TestIndexed(TestCase):
+    def setUp(self):
+        self.backends = list(resolver.backends)
+        resolver.backends = []
+        resolver.load_backends(('dbindexer.backends.BaseResolver',
+                      'dbindexer.backends.FKNullFix',
+#                      'dbindexer.backends.InMemoryJOINResolver',
+                      'dbindexer.backends.ConstantFieldJOINResolver',
+        ))
+        self.register_indexes()
+        
+        juubi = ForeignIndexed2(name_fi2='Juubi', age=2)
+        juubi.save()
+        rikudo = ForeignIndexed2(name_fi2='Rikudo', age=200)
+        rikudo.save()
+        
+        kyuubi = ForeignIndexed(name_fi='Kyuubi', title='Bijuu', fk=juubi)
+        hachibi= ForeignIndexed(name_fi='Hachibi', title='Bijuu', fk=rikudo)
+        kyuubi.save()
+        hachibi.save()
+                
+        Indexed(name='ItAchi', tags=('Sasuke', 'Madara'), foreignkey=kyuubi,
+                foreignkey2=juubi).save()
+        Indexed(name='YondAimE', tags=('Naruto', 'Jiraya'), foreignkey=kyuubi,
+                foreignkey2=juubi).save()
+        Indexed(name='Neji', tags=('Hinata',), foreignkey=hachibi,
+                foreignkey2=juubi).save()
+        Indexed(name='I1038593i', tags=('Sharingan',), foreignkey=hachibi,
+                foreignkey2=rikudo).save()
+    
+    def tearDown(self):
+        resolver.backends = self.backends
+        
+    def register_indexes(self):
+        register_index(Indexed, {
+            'name': ('iexact', 'endswith', 'istartswith', 'iendswith', 'contains',
+                     'icontains', re.compile('^i+', re.I), re.compile('^I+'),
+                     re.compile('^i\d*i$', re.I)),
+            'published': ('month', 'day', 'year', 'week_day'),
+            'tags': ('iexact', 'icontains', StandardLookup() ),
+            'foreignkey__fk': (StandardLookup()),
+            'foreignkey__title': 'iexact',
+            'foreignkey__name_fi': 'iexact',
+            'foreignkey__fk__name_fi2': ('iexact', 'endswith'),
+            'foreignkey2__name_fi2': (StandardLookup(), 'iexact'),
+            'foreignkey2__age': (StandardLookup())
+        })
+        
+        register_index(ForeignIndexed, {
+            'title': 'iexact',
+            'name_fi': ('iexact', 'icontains'),
+            'fk__name_fi2': ('iexact', 'endswith'),
+            'fk__age': (StandardLookup()),
+        })
+        
+    # TODO: add tests for created indexes for all backends!
+#    def test_model_fields(self):
+#        field_list = [(item[0], item[0].column) 
+#                       for item in Indexed._meta.get_fields_with_model()]
+#        print field_list
+#        x()
+        # The in-memory JOIN backend shouldn't create multiple indexes on the
+        # foreignkey side for different paths, nor for index definitions on
+        # different models. Test this!
+        # The standard JOIN backend should always add extra fields to the
+        # registered model. Test this!
+    
+    def test_joins(self):
+        self.assertEqual(2, len(Indexed.objects.all().filter(
+            foreignkey__fk__name_fi2__iexact='juuBi',
+            foreignkey__title__iexact='biJuu')))
+        
+        self.assertEqual(0, len(Indexed.objects.all().filter(
+            foreignkey__fk__name_fi2__iexact='juuBi',
+            foreignkey2__name_fi2__iexact='Rikudo')))
+        
+        self.assertEqual(1, len(Indexed.objects.all().filter(
+            foreignkey__fk__name_fi2__endswith='udo',
+            foreignkey2__name_fi2__iexact='Rikudo')))
+        
+        self.assertEqual(2, len(Indexed.objects.all().filter(
+            foreignkey__title__iexact='biJuu',
+            foreignkey__name_fi__iexact='kyuuBi')))
+        
+        self.assertEqual(2, len(Indexed.objects.all().filter(
+            foreignkey__title__iexact='biJuu',
+            foreignkey__name_fi__iexact='Hachibi')))
+                
+        self.assertEqual(1, len(Indexed.objects.all().filter(
+            foreignkey__title__iexact='biJuu', name__iendswith='iMe')))
+        
+        # JOINs on one field only
+        self.assertEqual(4, len(Indexed.objects.all().filter(
+            foreignkey__title__iexact='biJuu')))
+        self.assertEqual(3, len(Indexed.objects.all().filter(
+           foreignkey2__name_fi2='Juubi')))
+        
+        # test endswith instead of iexact for a change :)
+        self.assertEqual(2, len(Indexed.objects.all().filter(
+            foreignkey__fk__name_fi2__endswith='bi')))
+        
+        # test JOINs via different paths targeting the same field
+        self.assertEqual(2, len(Indexed.objects.all().filter(
+            foreignkey__fk__name_fi2__iexact='juuBi')))
+        self.assertEqual(3, len(Indexed.objects.all().filter(
+           foreignkey2__name_fi2__iexact='Juubi')))
+        
+        # test standard lookups for foreign_keys
+        self.assertEqual(3, len(Indexed.objects.all().filter(
+            foreignkey2__age=2)))
+        self.assertEqual(4, len(Indexed.objects.all().filter(
+            foreignkey2__age__lt=201)))
+        
+        # test JOINs on different model
+        # standard lookups JOINs
+        self.assertEqual(1, len(ForeignIndexed.objects.all().filter(
+            fk__age=2)))
+        self.assertEqual(2, len(ForeignIndexed.objects.all().filter(
+            fk__age__lt=210)))
+        
+        # other JOINs
+        self.assertEqual(1, len(ForeignIndexed.objects.all().filter(
+            fk__name_fi2__iexact='juUBI')))
+        self.assertEqual(1, len(ForeignIndexed.objects.all().filter(
+            fk__name_fi2__endswith='bi')))
+
+    def test_fix_fk_isnull(self):
+        self.assertEqual(0, len(Indexed.objects.filter(foreignkey=None)))
+        self.assertEqual(4, len(Indexed.objects.exclude(foreignkey=None)))
+
+    def test_iexact(self):
+        self.assertEqual(1, len(Indexed.objects.filter(name__iexact='itaChi')))
+        self.assertEqual(1, Indexed.objects.filter(name__iexact='itaChi').count())
+        
+        self.assertEqual(2, ForeignIndexed.objects.filter(title__iexact='BIJUU').count())
+        self.assertEqual(1, ForeignIndexed.objects.filter(name_fi__iexact='KYuubi').count())
+        
+        # test on list field
+        self.assertEqual(1, Indexed.objects.filter(tags__iexact='SasuKE').count())
+    
+    def test_standard_lookups(self):
+        self.assertEqual(1, Indexed.objects.filter(tags__exact='Naruto').count())
+        
+        # test standard lookup on foreign_key
+        juubi = ForeignIndexed2.objects.all().get(name_fi2='Juubi', age=2)
+        self.assertEqual(2, Indexed.objects.filter(foreignkey__fk=juubi).count())
+    
+    def test_delete(self):
+        Indexed.objects.get(name__iexact='itaChi').delete()
+        self.assertEqual(0, Indexed.objects.all().filter(name__iexact='itaChi').count())
+
+    def test_delete_query(self):
+        Indexed.objects.all().delete()
+        self.assertEqual(0, Indexed.objects.all().filter(name__iexact='itaChi').count())
+
+    def test_istartswith(self):
+        self.assertEqual(1, len(Indexed.objects.all().filter(name__istartswith='iTa')))
+
+    def test_endswith(self):
+        self.assertEqual(1, len(Indexed.objects.all().filter(name__endswith='imE')))
+        self.assertEqual(1, len(Indexed.objects.all().filter(name__iendswith='iMe')))
+
+    def test_regex(self):
+        self.assertEqual(2, len(Indexed.objects.all().filter(name__iregex='^i+')))
+        self.assertEqual(2, len(Indexed.objects.all().filter(name__regex='^I+')))
+        self.assertEqual(1, len(Indexed.objects.all().filter(name__iregex='^i\d*i$')))
+
+    def test_date_filters(self):
+        now = datetime.now()
+        self.assertEqual(4, len(Indexed.objects.all().filter(published__month=now.month)))
+        self.assertEqual(4, len(Indexed.objects.all().filter(published__day=now.day)))
+        self.assertEqual(4, len(Indexed.objects.all().filter(published__year=now.year)))
+        self.assertEqual(4, len(Indexed.objects.all().filter(
+            published__week_day=now.isoweekday())))
+
+#    def test_contains(self):
+#        # passes on production but not on gae-sdk (development)
+#        self.assertEqual(1, len(Indexed.objects.all().filter(name__contains='Aim')))
+#        self.assertEqual(1, len(Indexed.objects.all().filter(name__icontains='aim')))
+#
+#        self.assertEqual(1, ForeignIndexed.objects.filter(name_fi__icontains='Yu').count())
+#
+#        # test icontains on a list
+#        self.assertEqual(2, len(Indexed.objects.all().filter(tags__icontains='RA')))

djangoappengine/.deps

+[repos]
+djangotoolbox = https://bitbucket.org/wkornewald/djangotoolbox
+
+[links]
+djangotoolbox = djangotoolbox/djangotoolbox

djangoappengine/.hgeol

+[patterns]
+.deps = native
+.hgignore = native
+.hgeol = native
+**.txt = native
+**.pyva = native
+**.py = native
+**.ru = native
+**.c = native
+**.cpp = native
+**.cu = native
+**.h = native
+**.hpp = native
+**.tmpl = native
+**.html = native
+**.htm = native
+**.js = native
+**.manifest = native
+**.yaml = native

djangoappengine/.hgignore

+syntax: glob
+build
+dist
+*.egg-info
+.project
+.pydevproject
+.settings
+*~
+*.orig
+*.pyc
+*.pyo
+*.swp
+*.tmp
+desktop.ini
+nbproject
+build
+dist

djangoappengine/LICENSE

+Copyright (c) Waldemar Kornewald, Thomas Wanschik, and all contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+    1. Redistributions of source code must retain the above copyright notice, 
+       this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright 
+       notice, this list of conditions and the following disclaimer in the
+       documentation and/or other materials provided with the distribution.
+
+    3. Neither the name of All Buttons Pressed nor
+       the names of its contributors may be used to endorse or promote products
+       derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

djangoappengine/MANIFEST.in

+include LICENSE
+include CHANGELOG.rst
+include README.rst

djangoappengine/__init__.py

Empty file added.

djangoappengine/appstats/__init__.py

Empty file added.

djangoappengine/appstats/ui.py

+# Initialize Django (importing main sets up the environment)
+from djangoappengine.main import main
+
+# Intentionally rebind 'main' to App Engine's appstats UI entry point
+from google.appengine.ext.appstats.ui import main
+
+if __name__ == '__main__':
+    main()

djangoappengine/boot.py

+import logging
+import os
+import sys
+
+PROJECT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+DATA_ROOT = os.path.join(PROJECT_DIR, '.gaedata')
+
+# Overrides for os.environ
+env_ext = {'DJANGO_SETTINGS_MODULE': 'settings'}
+
+def setup_env():
+    """Configures app engine environment for command-line apps."""
+    # Try to import the appengine code from the system path.
+    try:
+        from google.appengine.api import apiproxy_stub_map
+    except ImportError:
+        for k in [k for k in sys.modules if k.startswith('google')]:
+            del sys.modules[k]
+
+        # Not on the system path. Build a list of alternative paths where it
+        # may be. First look within the project for a local copy, then look for
+        # where the Mac OS SDK installs it.
+        paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
+                 os.environ.get('APP_ENGINE_SDK'),
+                 '/usr/local/google_appengine',
+                 '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
+        for path in os.environ.get('PATH', '').split(os.pathsep):
+            path = path.rstrip(os.sep)
+            if path.endswith('google_appengine'):
+                paths.append(path)
+        if os.name in ('nt', 'dos'):
+            path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
+            paths.append(path)
+        # Loop through all possible paths and look for the SDK dir.
+        sdk_path = None
+        for path in paths:
+            if not path:
+                continue
+            path = os.path.expanduser(path)
+            path = os.path.realpath(path)
+            if os.path.exists(path):
+                sdk_path = path
+                break
+        if sdk_path is None:
+            # The SDK could not be found in any known location.
+            sys.stderr.write('The Google App Engine SDK could not be found!\n'
+                             "Make sure it's accessible via your PATH "
+                             "environment and called google_appengine.\n")
+            sys.exit(1)
+        # Add the SDK and the libraries within it to the system path.
+        extra_paths = [sdk_path]
+        lib = os.path.join(sdk_path, 'lib')
+        # Automatically add all packages in the SDK's lib folder:
+        for dir in os.listdir(lib):
+            path = os.path.join(lib, dir)
+            # Package can be under 'lib/<pkg>/<pkg>/' or 'lib/<pkg>/lib/<pkg>/'
+            detect = (os.path.join(path, dir), os.path.join(path, 'lib', dir))
+            for path in detect:
+                if os.path.isdir(path) and not dir == 'django':
+                    extra_paths.append(os.path.dirname(path))
+                    break
+        sys.path = extra_paths + sys.path
+        from google.appengine.api import apiproxy_stub_map
+
+    setup_project()
+    from .utils import have_appserver
+    if have_appserver:
+        # App Engine's threading.local is broken
+        setup_threading()
+    elif not os.path.exists(DATA_ROOT):
+        os.mkdir(DATA_ROOT)
+    setup_logging()
+
+    if not have_appserver:
+        # Patch Django to support loading management commands from zip files
+        from django.core import management
+        management.find_commands = find_commands
+
+def find_commands(management_dir):
+    """
+    Given a path to a management directory, returns a list of all the command
+    names that are available.
+    This version works for Django deployments which are file-based or
+    contained in a ZIP (on sys.path).
+
+    Returns an empty list if no commands are defined.
+    """
+    import pkgutil
+    return [modname for importer, modname, ispkg in pkgutil.iter_modules(
+                [os.path.join(management_dir, 'commands')]) if not ispkg]
+
+def setup_threading():
+    # XXX: GAE's threading.local doesn't work correctly with subclassing
+    try:
+        from django.utils._threading_local import local
+        import threading
+        threading.local = local
+    except ImportError:
+        pass
+
+def setup_logging():
+    # Fix Python 2.6 logging module
+    logging.logMultiprocessing = 0
+
+    # Enable logging
+    level = logging.DEBUG
+    from .utils import have_appserver
+    if have_appserver:
+        # We can't import settings at this point when running a normal
+        # manage.py command because this module gets imported from settings.py
+        from django.conf import settings
+        if not settings.DEBUG:
+            level = logging.INFO
+    logging.getLogger().setLevel(level)
+
+def setup_project():
+    from .utils import have_appserver, on_production_server
+    if have_appserver:
+        # This fixes a pwd import bug for os.path.expanduser()
+        env_ext['HOME'] = PROJECT_DIR
+
+    # The dev_appserver creates a sandbox which restricts access to certain
+    # modules and builtins in order to emulate the production environment.
+    # Here we get the subprocess module back into the dev_appserver sandbox.
+    # This module is just too important for development.
+    # Also we add the compiler/parser module back and enable https connections
+    # (they seem to be broken on Windows because the _ssl module is disallowed).
+    if not have_appserver:
+        from google.appengine.tools import dev_appserver
+        try:
+            # Backup os.environ. It gets overwritten by the dev_appserver,
+            # but it's needed by the subprocess module.
+            env = dev_appserver.DEFAULT_ENV
+            dev_appserver.DEFAULT_ENV = os.environ.copy()
+            dev_appserver.DEFAULT_ENV.update(env)
+            # Backup the buffer() builtin. The subprocess module in Python 2.5
+            # on Linux and OS X needs it, but the dev_appserver removes it.
+            dev_appserver.buffer = buffer
+        except AttributeError:
+            logging.warn('Could not patch the default environment. '
+                         'The subprocess module will not work correctly.')
+
+        try:
+            # Allow importing compiler/parser and _ssl modules (for https)
+            dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
+                ('parser', '_ssl'))
+        except AttributeError:
+            logging.warn('Could not patch modules whitelist. '
+                         'The compiler and parser modules will not work and '
+                         'SSL support is disabled.')
+    elif not on_production_server:
+        try:
+            # Restore the real subprocess module
+            from google.appengine.api.mail_stub import subprocess
+            sys.modules['subprocess'] = subprocess
+            # Re-inject the buffer() builtin into the subprocess module
+            from google.appengine.tools import dev_appserver
+            subprocess.buffer = dev_appserver.buffer
+        except Exception, e:
+            logging.warn('Could not add the subprocess module to the sandbox: %s' % e)
+
+    os.environ.update(env_ext)
+
+    extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
+    zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')
+
+    # We support zipped packages in the common and project folders.
+    if os.path.isdir(zip_packages_dir):
+        for zip_package in os.listdir(zip_packages_dir):
+            extra_paths.append(os.path.join(zip_packages_dir, zip_package))
+
+    # App Engine causes main.py to be reloaded if an exception gets raised
+    # on the first request of a main.py instance, so setup_project() may run
+    # more than once. We make the path setup idempotent by checking whether
+    # we've already put extra_paths at the front of sys.path.
+    if len(sys.path) < len(extra_paths) or \
+            sys.path[:len(extra_paths)] != extra_paths:
+        for path in extra_paths:
+            while path in sys.path:
+                sys.path.remove(path)
+        sys.path = extra_paths + sys.path
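The guard is easy to check in isolation. A toy run of the same prepend logic (paths are hypothetical) leaves the extra paths at the head of sys.path exactly once, no matter how often it executes:

import sys

extra_paths = ['/proj', '/proj/lib']
for _ in range(2):  # simulate main.py being loaded twice
    if sys.path[:len(extra_paths)] != extra_paths:
        for path in extra_paths:
            while path in sys.path:
                sys.path.remove(path)
        sys.path = extra_paths + sys.path
assert sys.path[:2] == extra_paths
assert sys.path.count('/proj') == 1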

djangoappengine/db/__init__.py

Empty file added.

djangoappengine/db/base.py

+from ..boot import DATA_ROOT
+from ..utils import appid, on_production_server
+from .creation import DatabaseCreation
+from .stubs import stub_manager
+from django.db.backends.util import format_number
+from djangotoolbox.db.base import NonrelDatabaseFeatures, \
+    NonrelDatabaseOperations, NonrelDatabaseWrapper, NonrelDatabaseClient, \
+    NonrelDatabaseValidation, NonrelDatabaseIntrospection
+from google.appengine.ext.db.metadata import get_kinds, get_namespaces
+from google.appengine.api.datastore import Query, Delete
+from google.appengine.api.namespace_manager import set_namespace
+import errno
+import logging
+import os
+
+DATASTORE_PATHS = {
+    'datastore_path': os.path.join(DATA_ROOT, 'datastore'),
+    'blobstore_path': os.path.join(DATA_ROOT, 'blobstore'),
+    'rdbms_sqlite_path': os.path.join(DATA_ROOT, 'rdbms'),
+    'prospective_search_path': os.path.join(DATA_ROOT, 'prospective-search'),
+}
+
+def get_datastore_paths(options):
+    paths = {}
+    for key, path in DATASTORE_PATHS.items():
+        paths[key] = options.get(key, path)
+    return paths
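Because get_datastore_paths() reads from the connection's settings_dict, each stub path can be overridden per database in settings.py; keys that are left out fall back to the DATA_ROOT defaults above. A hypothetical fragment:

DATABASES = {
    'default': {
        'ENGINE': 'djangoappengine.db',
        # Hypothetical overrides; omitted keys use the defaults above.
        'datastore_path': '/tmp/myapp/datastore',
        'blobstore_path': '/tmp/myapp/blobstore',
    },
}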
+
+def destroy_datastore(paths):
+    """Destroys the appengine datastore at the specified paths."""
+    for path in paths.values():
+        if not path:
+            continue
+        try:
+            os.remove(path)
+        except OSError, error:
+            # Ignore ENOENT; the stub file may simply not exist yet.
+            if error.errno != errno.ENOENT:
+                logging.error("Failed to clear datastore: %s" % error)
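For example, wiping one stub file set by hand (the override path is hypothetical; omitted keys resolve to the DATA_ROOT defaults):

destroy_datastore(get_datastore_paths({'datastore_path': '/tmp/myapp/datastore'}))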
+
+class DatabaseFeatures(NonrelDatabaseFeatures):
+    allows_primary_key_0 = True
+    supports_dicts = True
+
+class DatabaseOperations(NonrelDatabaseOperations):
+    compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'
+
+    DEFAULT_MAX_DIGITS = 16
+
+    def value_to_db_decimal(self, value, max_digits, decimal_places):
+        if value is None:
+            return None
+        sign = u'-' if value < 0 else u''
+        if sign:
+            value = abs(value)
+        if max_digits is None:
+            max_digits = self.DEFAULT_MAX_DIGITS
+            max_digits = self.DEFAULT_MAX_DIGITS
+
+        if decimal_places is None:
+            value = unicode(value)
+        else:
+            value = format_number(value, max_digits, decimal_places)
+        decimal_places = decimal_places or 0
+        n = value.find('.')
+
+        if n < 0:
+            n = len(value)
+        if n < max_digits - decimal_places:
+            value = u"0" * (max_digits - decimal_places - n) + value
+        return sign + value
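The zero padding exists so that the datastore's lexicographic string comparison matches numeric order for non-negative values. A standalone sketch of the same rule (illustration only, not the code path above):

from decimal import Decimal

def pad_decimal(value, max_digits, decimal_places):
    sign = u'-' if value < 0 else u''
    s = u'%.*f' % (decimal_places, abs(value))
    n = s.find('.')
    if n < max_digits - decimal_places:
        s = u'0' * (max_digits - decimal_places - n) + s
    return sign + s

assert pad_decimal(Decimal('3.45'), 6, 2) == u'0003.45'
assert pad_decimal(Decimal('12.5'), 6, 2) == u'0012.50'
assert pad_decimal(Decimal('3.45'), 6, 2) < pad_decimal(Decimal('12.5'), 6, 2)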
+
+    def sql_flush(self, style, tables, sequences):
+        self.connection.flush()
+        return []
+
+class DatabaseClient(NonrelDatabaseClient):
+    pass
+
+class DatabaseValidation(NonrelDatabaseValidation):
+    pass
+
+class DatabaseIntrospection(NonrelDatabaseIntrospection):
+    def table_names(self):
+        """Returns a list of names of all tables that exist in the database."""
+        return [kind.key().name() for kind in Query(kind='__kind__').Run()]
+
+class DatabaseWrapper(NonrelDatabaseWrapper):
+    def __init__(self, *args, **kwds):
+        super(DatabaseWrapper, self).__init__(*args, **kwds)
+        self.features = DatabaseFeatures(self)
+        self.ops = DatabaseOperations(self)
+        self.client = DatabaseClient(self)
+        self.creation = DatabaseCreation(self)
+        self.validation = DatabaseValidation(self)
+        self.introspection = DatabaseIntrospection(self)
+        options = self.settings_dict
+        self.remote_app_id = options.get('REMOTE_APP_ID', appid)
+        self.domain = options.get('DOMAIN', 'appspot.com')
+        self.remote_api_path = options.get('REMOTE_API_PATH', None)
+        self.secure_remote_api = options.get('SECURE_REMOTE_API', True)
+
+        remote = options.get('REMOTE', False)
+        if on_production_server:
+            remote = False
+        if remote:
+            stub_manager.setup_remote_stubs(self)
+        else:
+            stub_manager.setup_stubs(self)
+
+    def flush(self):
+        """Helper function to remove the current datastore and re-open the stubs"""
+        if stub_manager.active_stubs == 'remote':
+            import random
+            import string
+            code = ''.join([random.choice(string.ascii_letters) for x in range(4)])
+            print '\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
+            print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
+            print "Warning! You're about to delete the *production* datastore!"
+            print 'Only models defined in your INSTALLED_APPS can be removed!'
+            print 'If you want to clear the whole datastore you have to use the ' \
+                  'datastore viewer in the dashboard. Also, in order to delete all '\
+                  'unneeded indexes you have to run appcfg.py vacuum_indexes.'
+            print 'In order to proceed you have to enter the following code:'
+            print code
+            response = raw_input('Repeat: ')
+            if code == response:
+                print 'Deleting...'
+                delete_all_entities()
+                print "Datastore flushed! Please check your dashboard's " \
+                      'datastore viewer for any remaining entities and remove ' \
+                      'all unneeded indexes with manage.py vacuum_indexes.'
+            else:
+                print 'Aborting'
+                exit()
+        elif stub_manager.active_stubs == 'test':
+            stub_manager.deactivate_test_stubs()
+            stub_manager.activate_test_stubs()
+#        elif on_production_server or have_appserver:
+#            delete_all_entities()
+        else:
+            destroy_datastore(get_datastore_paths(self.settings_dict))
+            stub_manager.setup_local_stubs(self)
+
+def delete_all_entities():
+    for namespace in get_namespaces():
+        set_namespace(namespace)
+        for kind in get_kinds():
+            if kind.startswith('__'):
+                continue
+            while True:
+                data = Query(kind=kind, keys_only=True).Get(200)
+                if not data:
+                    break
+                Delete(data)
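A hypothetical session showing the local branch of flush(): with REMOTE off, the stub files under DATA_ROOT are deleted and fresh local stubs are opened; with REMOTE on, the confirmation-code prompt above guards the production datastore.

from django.db import connection
connection.flush()  # local: removes DATA_ROOT stub files, reopens stubs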

djangoappengine/db/compiler.py

+from .db_settings import get_model_indexes
+
+import datetime
+import sys
+
+from django.db.models.sql import aggregates as sqlaggregates
+from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
+from django.db.models.sql.where import AND, OR
+from django.db.utils import DatabaseError, IntegrityError
+from django.utils.tree import Node
+
+from functools import wraps
+
+from google.appengine.api.datastore import Entity, Query, MultiQuery, \
+    Put, Get, Delete, Key
+from google.appengine.api.datastore_errors import Error as GAEError
+from google.appengine.api.datastore_types import Text, Category, Email, Link, \
+    PhoneNumber, PostalAddress, Blob, ByteString, GeoPt, IM, \
+    Rating, BlobKey
+
+from djangotoolbox.db.basecompiler import NonrelQuery, NonrelCompiler, \
+    NonrelInsertCompiler, NonrelUpdateCompiler, NonrelDeleteCompiler
+
+import cPickle as pickle
+
+import decimal
+
+# Valid query types (a dictionary is used for speedy lookups).
+OPERATORS_MAP = {
+    'exact': '=',
+    'gt': '>',
+    'gte': '>=',
+    'lt': '<',
+    'lte': '<=',
+
+    # The following operators are supported with special code below:
+    'isnull': None,
+    'in': None,
+    'startswith': None,
+    'range': None,
+    'year': None,
+}
+
+NEGATION_MAP = {
+    'gt': '<=',
+    'gte': '<',
+    'lt': '>=',
+    'lte': '>',
+    # TODO: support these filters
+    #'exact': '!=', # this might actually become individual '<' and '>' queries
+}
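For a hypothetical model, both maps funnel into the same datastore filter dict; a negated __exact has no single operator and is instead split into '<' and '>' sub-queries by _combine_filters() further below:

from google.appengine.api.datastore import Query

# filter(count__lte=5) and exclude(count__gt=5) both end up as:
q = Query('foo_table')   # 'foo_table' is a hypothetical kind
q['count <='] = 5        # OPERATORS_MAP['lte'] / NEGATION_MAP['gt']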
+
+def safe_call(func):
+    @wraps(func)
+    def _func(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except GAEError, e:
+            # Re-raise as Django's DatabaseError while preserving the
+            # original traceback (Python 2 three-argument raise).
+            raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
+    return _func
+
+class GAEQuery(NonrelQuery):
+    # ----------------------------------------------
+    # Public API
+    # ----------------------------------------------
+    def __init__(self, compiler, fields):
+        super(GAEQuery, self).__init__(compiler, fields)
+        self.inequality_field = None
+        self.pk_filters = None
+        self.excluded_pks = ()
+        self.has_negated_exact_filter = False
+        self.ordering = ()
+        self.gae_ordering = []
+        pks_only = False
+        if len(fields) == 1 and fields[0].primary_key:
+            pks_only = True
+        self.db_table = self.query.get_meta().db_table
+        self.pks_only = pks_only
+        start_cursor = getattr(self.query, '_gae_start_cursor', None)
+        end_cursor = getattr(self.query, '_gae_end_cursor', None)
+        self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
+                                cursor=start_cursor, end_cursor=end_cursor)]
+
+    # This is needed for debugging
+    def __repr__(self):
+        return '<GAEQuery: %r ORDER %r>' % (self.gae_query, self.ordering)
+
+    @safe_call
+    def fetch(self, low_mark=0, high_mark=None):
+        query = self._build_query()
+        executed = False
+        if self.excluded_pks and high_mark is not None:
+            high_mark += len(self.excluded_pks)
+        if self.pk_filters is not None:
+            results = self.get_matching_pk(low_mark, high_mark)
+        else:
+            if high_mark is None:
+                kw = {}
+                if low_mark:
+                    kw['offset'] = low_mark
+                results = query.Run(**kw)
+                executed = True
+            elif high_mark > low_mark:
+                results = query.Get(high_mark - low_mark, low_mark)
+                executed = True
+            else:
+                results = ()
+
+        for entity in results:
+            if isinstance(entity, Key):
+                key = entity
+            else:
+                key = entity.key()
+            if key in self.excluded_pks:
+                continue
+            yield self._make_entity(entity)
+
+        if executed and not isinstance(query, MultiQuery):
+            self.query._gae_cursor = query.GetCompiledCursor()
+
+    @safe_call
+    def count(self, limit=None):
+        if self.pk_filters is not None:
+            return len(self.get_matching_pk(0, limit))
+        if self.excluded_pks:
+            return len(list(self.fetch(0, 2000)))
+        kw = {}
+        if limit is not None:
+            kw['limit'] = limit
+        return self._build_query().Count(**kw)
+
+    @safe_call
+    def delete(self):
+        if self.pk_filters is not None:
+            keys = [key for key in self.pk_filters if key is not None]
+        else:
+            keys = self.fetch()
+        if keys:
+            Delete(keys)
+
+    @safe_call
+    def order_by(self, ordering):
+        self.ordering = ordering
+        for order in self.ordering:
+            if order.startswith('-'):
+                order, direction = order[1:], Query.DESCENDING
+            else:
+                direction = Query.ASCENDING
+            if order == self.query.get_meta().pk.column:
+                order = '__key__'
+            self.gae_ordering.append((order, direction))
+
+    # This function is used by the default add_filters() implementation
+    @safe_call
+    def add_filter(self, column, lookup_type, negated, db_type, value):
+        if value in ([], ()):
+            self.pk_filters = []
+            return
+
+        # Emulated/converted lookups
+        if column == self.query.get_meta().pk.column:
+            column = '__key__'
+            db_table = self.query.get_meta().db_table
+            if lookup_type in ('exact', 'in'):
+                # Optimization: batch-get by key
+                if self.pk_filters is not None:
+                    raise DatabaseError("You can't apply multiple AND filters "
+                                        "on the primary key. "
+                                        "Did you mean __in=[...]?")
+                if not isinstance(value, (tuple, list)):
+                    value = [value]
+                pks = [create_key(db_table, pk) for pk in value if pk]
+                if negated:
+                    self.excluded_pks = pks
+                else:
+                    self.pk_filters = pks
+                return
+            else:
+                # XXX: Set db_type to 'gae_key' so convert_value_for_db
+                # recognizes the value as a Key rather than a str. Otherwise
+                # the key would be converted back to unicode (see
+                # convert_value_for_db).
+                db_type = 'gae_key'
+                key_type_error = 'Lookup values on primary keys have to be ' \
+                                 'a string or an integer.'
+                if lookup_type == 'range':
+                    if isinstance(value, (list, tuple)) and not (
+                            isinstance(value[0], (basestring, int, long)) and
+                            isinstance(value[1], (basestring, int, long))):
+                        raise DatabaseError(key_type_error)
+                elif not isinstance(value, (basestring, int, long)):
+                    raise DatabaseError(key_type_error)
+                # A range lookup passes a pair of bounds; build a new list of
+                # keys (the original value may be an immutable tuple).
+                if lookup_type == 'range':
+                    value = [create_key(db_table, value[0]),
+                             create_key(db_table, value[1])]
+                else:
+                    value = create_key(db_table, value)
+        if lookup_type not in OPERATORS_MAP:
+            raise DatabaseError("Lookup type %r isn't supported" % lookup_type)
+
+        # We check for negation after lookup_type isnull because it
+        # simplifies the code. All following lookup_type checks assume
+        # that they're not negated.
+        if lookup_type == 'isnull':
+            if (negated and value) or not value:
+                # TODO/XXX: is everything greater than None?
+                op = '>'
+            else:
+                op = '='
+            value = None
+        elif negated and lookup_type == 'exact':
+            if self.has_negated_exact_filter:
+                raise DatabaseError("You can't exclude more than one __exact "
+                                    "filter")
+            self.has_negated_exact_filter = True
+            self._combine_filters(column, db_type,
+                                  (('<', value), ('>', value)))
+            return
+        elif negated:
+            try:
+                op = NEGATION_MAP[lookup_type]
+            except KeyError:
+                raise DatabaseError("Lookup type %r can't be negated" % lookup_type)
+            if self.inequality_field and column != self.inequality_field:
+                raise DatabaseError("Can't have inequality filters on multiple "
+                    "columns (here: %r and %r)" % (self.inequality_field, column))
+            self.inequality_field = column
+        elif lookup_type == 'in':
+            # Create sub-query combinations, one for each value
+            if len(self.gae_query) * len(value) > 30:
+                raise DatabaseError("You can't query against more than "
+                                    "30 __in filter value combinations")
+            op_values = [('=', v) for v in value]
+            self._combine_filters(column, db_type, op_values)
+            return
+        elif lookup_type == 'startswith':
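+            # The datastore has no native startswith operator; emulate it
+            # with a range filter from value up to value + u'\ufffd', the
+            # standard upper bound for datastore prefix queries.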
+            self._add_filter(column, '>=', db_type, value)
+            if isinstance(value, str):
+                value = value.decode('utf8')
+            if isinstance(value, Key):
+                value = list(value.to_path())
+                if isinstance(value[-1], str):
+                    value[-1] = value[-1].decode('utf8')
+                value[-1] += u'\ufffd'
+                value = Key.from_path(*value)
+            else:
+                value += u'\ufffd'
+            self._add_filter(column, '<=', db_type, value)
+            return
+        elif lookup_type in ('range', 'year'):
+            self._add_filter(column, '>=', db_type, value[0])
+            op = '<=' if lookup_type == 'range' else '<'
+            self._add_filter(column, op, db_type, value[1])
+            return
+        else:
+            op = OPERATORS_MAP[lookup_type]
+
+        self._add_filter(column, op, db_type, value)
+
+    # ----------------------------------------------
+    # Internal API
+    # ----------------------------------------------
+    def _add_filter(self, column, op, db_type, value):
+        for query in self.gae_query:
+            key = '%s %s' % (column, op)
+            value = self.convert_value_for_db(db_type, value)
+            if isinstance(value, Text):
+                raise DatabaseError('TextField is not indexed by default, '
+                                    "so you can't filter on it. Please add "
+                                    'an index definition for the column %s '
+                                    'on the model %s.%s as described here:\n'
+                                    'http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine'
+                                    % (column, self.query.model.__module__, self.query.model.__name__))
+            if key in query:
+                existing_value = query[key]
+                if isinstance(existing_value, list):
+                    existing_value.append(value)
+                else:
+                    query[key] = [existing_value, value]
+            else:
+                query[key] = value
+
+    def _combine_filters(self, column, db_type, op_values):
+        gae_query = self.gae_query
+        combined = []
+        for query in gae_query:
+            for op, value in op_values:
+                self.gae_query = [Query(self.db_table,
+                                        keys_only=self.pks_only)]
+                self.gae_query[0].update(query)
+                self._add_filter(column, op, db_type, value)
+                combined.append(self.gae_query[0])
+        self.gae_query = combined
+
+    def _make_entity(self, entity):
+        if isinstance(entity, Key):
+            key = entity
+            entity = {}
+        else:
+            key = entity.key()
+
+        entity[self.query.get_meta().pk.column] = key
+        return entity
+
+    @safe_call
+    def _build_query(self):
+        for query in self.gae_query:
+            query.Order(*self.gae_ordering)
+        if len(self.gae_query) > 1:
+            return MultiQuery(self.gae_query, self.gae_ordering)
+        return self.gae_query[0]
+
+    def get_matching_pk(self, low_mark=0, high_mark=None):
+        if not self.pk_filters:
+            return []
+
+        results = [result for result in Get(self.pk_filters)
+                   if result is not None and
+                       self.matches_filters(result)]
+        if self.ordering:
+            results.sort(cmp=self.order_pk_filtered)
+        if high_mark is not None and high_mark < len(results) - 1:
+            results = results[:high_mark]
+        if low_mark:
+            results = results[low_mark:]
+        return results
+
+    def order_pk_filtered(self, lhs, rhs):
+        left = dict(lhs)
+        left[self.query.get_meta().pk.column] = lhs.key().to_path()
+        right = dict(rhs)
+        right[self.query.get_meta().pk.column] = rhs.key().to_path()
+        return self._order_in_memory(left, right)
+
+    def matches_filters(self, entity):
+        item = dict(entity)
+        pk = self.query.get_meta().pk
+        value = self.convert_value_from_db(pk.db_type(connection=self.connection),
+            entity.key())
+        item[pk.column] = value
+        result = self._matches_filters(item, self.query.where)
+        return result
+
+class SQLCompiler(NonrelCompiler):
+    """
+    A simple App Engine query: no joins, no distinct, etc.
+    """
+    query_class = GAEQuery
+
+    def convert_value_from_db(self, db_type, value):
+        if isinstance(value, (list, tuple, set)) and \
+                db_type.startswith(('ListField:', 'SetField:')):
+            db_sub_type = db_type.split(':', 1)[1]
+            value = [self.convert_value_from_db(db_sub_type, subvalue)
+                     for subvalue in value]
+
+        if db_type.startswith('SetField:') and value is not None:
+            value = set(value)
+
+        if db_type.startswith('DictField:') and value is not None:
+            value = pickle.loads(value)
+            if ':' in db_type:
+                db_sub_type = db_type.split(':', 1)[1]
+                value = dict((key, self.convert_value_from_db(db_sub_type, value[key]))
+                             for key in value)
+
+        # The following GAE database types are all unicode subclasses; cast
+        # them to unicode so they appear as plain unicode instances to Django.
+        if isinstance(value, basestring) and value and db_type.startswith('decimal'):
+            value = decimal.Decimal(value)
+        elif isinstance(value, (Category, Email, Link, PhoneNumber, PostalAddress,
+                Text, unicode)):
+            value = unicode(value)
+        elif isinstance(value, Blob):
+            value = str(value)
+        elif isinstance(value, str):
+            # Always retrieve strings as unicode (old datasets may contain
+            # non-unicode strings, but we hand out unicode consistently).
+            value = value.decode('utf-8')
+        elif isinstance(value, Key):
+            # For now we do not support KeyFields, so a Key has to be the
+            # entity's own primary key.
+            # TODO: GAE: support parents via GAEKeyField
+            assert value.parent() is None, "Parents are not yet supported!"
+            if db_type == 'integer':
+                if value.id() is None:
+                    raise DatabaseError('Wrong type for Key. Expected '
+                        'integer, found None')
+                else:
+                    value = value.id()
+            elif db_type == 'text':
+                if value.name() is None:
+                    raise DatabaseError('Wrong type for Key. Expected '
+                        'string, found None')
+                else:
+                    value = value.name()
+            else:
+                raise DatabaseError("%s fields cannot be keys on GAE" % db_type)
+        elif db_type == 'date' and isinstance(value, datetime.datetime):
+            value = value.date()
+        elif db_type == 'time' and isinstance(value, datetime.datetime):
+            value = value.time()
+        return value
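A few illustrative round trips through convert_value_from_db() (values hypothetical, shown as input -> output rather than executable code):

# ('ListField:text', [u'a', u'b'])         -> [u'a', u'b']
# ('SetField:text',  [u'a', u'b'])         -> set([u'a', u'b'])
# ('date', datetime.datetime(2011, 1, 1))  -> datetime.date(2011, 1, 1)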
+
+    def convert_value_for_db(self, db_type, value):
+        if isinstance(value, unicode):
+            value = unicode(value)
+        elif isinstance(value, str):
+            value = str(value)
+        elif isinstance(value, (list, tuple, set)) and \
+                db_type.startswith(('ListField:', 'SetField:')):
+            db_sub_type = db_type.split(':', 1)[1]
+            value = [self.convert_value_for_db(db_sub_type, subvalue)
+                     for subvalue in value]
+        elif isinstance(value, decimal.Decimal) and db_type.startswith("decimal:"):
+            value = self.connection.ops.value_to_db_decimal(value, *eval(db_type[8:]))
+        elif isinstance(value, dict) and db_type.startswith('DictField:'):