Commits

Mikko Ohtamaa committed e7226fa

Packaging appengine properly

  • Parent commit: 84a2930

Files changed (73)

File __init__.py

Empty file removed.

File boot.py

-import logging
-import os
-import sys
-
-if "GOOGLE_APPENGINE_PROJECT_ROOT" in os.environ:
-        # Read location of App Engine .yaml files from OS environment
-        PROJECT_DIR = os.environ["GOOGLE_APPENGINE_PROJECT_ROOT"]
-else:
-        # Default to the location of this file
-        PROJECT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
-
-# Overrides for os.environ
-env_ext = {'DJANGO_SETTINGS_MODULE': 'settings'}
-
-def setup_env():
-    """Configures app engine environment for command-line apps."""
-    # Try to import the appengine code from the system path.
-    try:
-        from google.appengine.api import apiproxy_stub_map
-    except ImportError:
-        for k in [k for k in sys.modules if k.startswith('google')]:
-            del sys.modules[k]
-
-        # Not on the system path. Build a list of alternative paths where it
-        # may be. First look within the project for a local copy, then look for
-        # where the Mac OS SDK installs it.
-        paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
-                 os.environ.get('APP_ENGINE_SDK'),
-                 '/usr/local/google_appengine',
-                 '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
-        for path in os.environ.get('PATH', '').split(os.pathsep):
-            path = path.rstrip(os.sep)
-            if path.endswith('google_appengine'):
-                paths.append(path)
-        if os.name in ('nt', 'dos'):
-            path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
-            paths.append(path)
-        # Loop through all possible paths and look for the SDK dir.
-        sdk_path = None
-        for path in paths:
-            if not path:
-                continue
-            path = os.path.expanduser(path)
-            path = os.path.realpath(path)
-            if os.path.exists(path):
-                sdk_path = path
-                break
-        if sdk_path is None:
-            # The SDK could not be found in any known location.
-            sys.stderr.write('The Google App Engine SDK could not be found!\n'
-                             "Make sure it's accessible via your PATH "
-                             "environment and called google_appengine.\n")
-            sys.exit(1)
-        # Add the SDK and the libraries within it to the system path.
-        extra_paths = [sdk_path]
-        lib = os.path.join(sdk_path, 'lib')
-        # Automatically add all packages in the SDK's lib folder:
-        for dir in os.listdir(lib):
-            path = os.path.join(lib, dir)
-            # Package can be under 'lib/<pkg>/<pkg>/' or 'lib/<pkg>/lib/<pkg>/'
-            detect = (os.path.join(path, dir), os.path.join(path, 'lib', dir))
-            for path in detect:
-                if os.path.isdir(path) and not dir == 'django':
-                    extra_paths.append(os.path.dirname(path))
-                    break
-        sys.path = extra_paths + sys.path
-        from google.appengine.api import apiproxy_stub_map
-    
-    setup_project()
-    from .utils import have_appserver
-    if have_appserver:
-        # App Engine's threading.local is broken
-        setup_threading()
-    setup_logging()
-
-    if not have_appserver:
-        # Patch Django to support loading management commands from zip files
-        from django.core import management
-        management.find_commands = find_commands
-
-def find_commands(management_dir):
-    """
-    Given a path to a management directory, returns a list of all the command
-    names that are available.
-    This version works for django deployments which are file based or
-    contained in a ZIP (in sys.path).
-
-    Returns an empty list if no commands are defined.
-    """
-    import pkgutil
-    return [modname for importer, modname, ispkg in pkgutil.iter_modules(
-                [os.path.join(management_dir, 'commands')]) if not ispkg]
-
-def setup_threading():
-    # XXX: GAE's threading.local doesn't work correctly with subclassing
-    try:
-        from django.utils._threading_local import local
-        import threading
-        threading.local = local
-    except ImportError:
-        pass
-
-def setup_logging():
-    # Fix Python 2.6 logging module
-    logging.logMultiprocessing = 0
-
-    # Enable logging
-    level = logging.DEBUG
-    from .utils import have_appserver
-    if have_appserver:
-        # We can't import settings at this point when running a normal
-        # manage.py command because this module gets imported from settings.py
-        from django.conf import settings
-        if not settings.DEBUG:
-            level = logging.INFO
-    logging.getLogger().setLevel(level)
-
-def setup_project():
-    from .utils import have_appserver, on_production_server
-    if have_appserver:
-        # This fixes a pwd import bug for os.path.expanduser()
-        env_ext['HOME'] = PROJECT_DIR
-
-    # The dev_appserver creates a sandbox which restricts access to certain
-    # modules and builtins in order to emulate the production environment.
-    # Here we get the subprocess module back into the dev_appserver sandbox.
-    # This module is just too important for development.
-    # Also we add the compiler/parser modules back and enable https connections
-    # (these seem to be broken on Windows because the _ssl module is disallowed).
-    if not have_appserver:
-        from google.appengine.tools import dev_appserver
-        try:
-            # Backup os.environ. It gets overwritten by the dev_appserver,
-            # but it's needed by the subprocess module.
-            env = dev_appserver.DEFAULT_ENV
-            dev_appserver.DEFAULT_ENV = os.environ.copy()
-            dev_appserver.DEFAULT_ENV.update(env)
-            # Backup the buffer() builtin. The subprocess module in Python 2.5
-            # on Linux and OS X needs it, but the dev_appserver removes it.
-            dev_appserver.buffer = buffer
-        except AttributeError:
-            logging.warn('Could not patch the default environment. '
-                         'The subprocess module will not work correctly.')
-
-        try:
-            # Allow importing compiler/parser and _ssl modules (for https)
-            dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
-                ('parser', '_ssl'))
-        except AttributeError:
-            logging.warn('Could not patch modules whitelist. '
-                         'The compiler and parser modules will not work and '
-                         'SSL support is disabled.')
-    elif not on_production_server:
-        try:
-            # Restore the real subprocess module
-            from google.appengine.api.mail_stub import subprocess
-            sys.modules['subprocess'] = subprocess
-            # Re-inject the buffer() builtin into the subprocess module
-            from google.appengine.tools import dev_appserver
-            subprocess.buffer = dev_appserver.buffer
-        except Exception, e:
-            logging.warn('Could not add the subprocess module to the sandbox: %s' % e)
-
-    os.environ.update(env_ext)
-
-    extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
-    zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')
-
-    # We support zipped packages in the common and project folders.
-    if os.path.isdir(zip_packages_dir):
-        for zip_package in os.listdir(zip_packages_dir):
-            extra_paths.append(os.path.join(zip_packages_dir, zip_package))
-
-    # App Engine causes main.py to be reloaded if an exception gets raised
-    # on the first request of a main.py instance, so don't call setup_project()
-    # multiple times. We ensure this indirectly by checking whether we've
-    # already modified sys.path.
-    if len(sys.path) < len(extra_paths) or \
-            sys.path[:len(extra_paths)] != extra_paths:
-        for path in extra_paths:
-            while path in sys.path:
-                sys.path.remove(path)
-        sys.path = extra_paths + sys.path

File db/__init__.py

Empty file removed.

File db/base.py

-from ..utils import appid, have_appserver, on_production_server
-from .creation import DatabaseCreation
-from django.db.backends.util import format_number
-from djangotoolbox.db.base import NonrelDatabaseFeatures, \
-    NonrelDatabaseOperations, NonrelDatabaseWrapper, NonrelDatabaseClient, \
-    NonrelDatabaseValidation, NonrelDatabaseIntrospection
-from urllib2 import HTTPError, URLError
-import logging
-import os
-import time
-
-REMOTE_API_SCRIPT = '$PYTHON_LIB/google/appengine/ext/remote_api/handler.py'
-
-def auth_func():
-    import getpass
-    return raw_input('Login via Google Account: '), getpass.getpass('Password: ')
-
-def rpc_server_factory(*args, ** kwargs):
-    from google.appengine.tools import appengine_rpc
-    kwargs['save_cookies'] = True
-    return appengine_rpc.HttpRpcServer(*args, ** kwargs)
-
-def get_datastore_paths(options):
-    """Returns a tuple with the paths to the datastore, blobstore and
-    history files.
-
-    The datastore is stored in the same location as dev_appserver uses by
-    default, but the name is altered to be unique to this project so multiple
-    Django projects can be developed on the same machine in parallel.
-
-    Returns:
-      (datastore_path, blobstore_path, history_path)
-    """
-    from google.appengine.tools import dev_appserver_main
-    datastore_path = options.get('datastore_path',
-                                 dev_appserver_main.DEFAULT_ARGS['datastore_path'].replace(
-                                 'dev_appserver', 'django_%s' % appid))
-    blobstore_path = options.get('blobstore_path',
-                                 dev_appserver_main.DEFAULT_ARGS['blobstore_path'].replace(
-                                 'dev_appserver', 'django_%s' % appid))
-    history_path = options.get('history_path',
-                               dev_appserver_main.DEFAULT_ARGS['history_path'].replace(
-                               'dev_appserver', 'django_%s' % appid))
-    return datastore_path, blobstore_path, history_path
-
-def get_test_datastore_paths(inmemory=True):
-    """Returns a tuple with the paths to the test datastore, blobstore and
-    history files.
-
-    If inmemory is true, (None, None, None) is returned to request an
-    in-memory datastore. If inmemory is false, the paths returned are similar
-    to those returned by get_datastore_paths, but with different names.
-
-    Returns:
-      (datastore_path, blobstore_path, history_path)
-    """
-    if inmemory:
-        return None, None, None
-    datastore_path, blobstore_path, history_path = get_datastore_paths({})
-    datastore_path = datastore_path.replace('.datastore', '.testdatastore')
-    blobstore_path = blobstore_path.replace('.blobstore', '.testblobstore')
-    history_path = history_path.replace('.datastore', '.testdatastore')
-    return datastore_path, blobstore_path, history_path
-
-def destroy_datastore(*args):
-    """Destroys the appengine datastore at the specified paths."""
-    for path in args:
-        if not path:
-            continue
-        try:
-            os.remove(path)
-        except OSError, error:
-            if error.errno != 2:
-                logging.error("Failed to clear datastore: %s" % error)
-
-class DatabaseFeatures(NonrelDatabaseFeatures):
-    allows_primary_key_0 = True
-    supports_dicts = True
-
-class DatabaseOperations(NonrelDatabaseOperations):
-    compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'
-
-    DEFAULT_MAX_DIGITS = 16
-    def value_to_db_decimal(self, value, max_digits, decimal_places):
-        if value is None: 
-            return None
-        sign = value < 0 and u'-' or u''
-        if sign: 
-            value = abs(value)
-        if max_digits is None: 
-            max_digits = self.DEFAULT_MAX_DIGITS
-
-        if decimal_places is None:
-            value = unicode(value)
-        else:
-            value = format_number(value, max_digits, decimal_places)
-        decimal_places = decimal_places or 0
-        n = value.find('.')
-
-        if n < 0:
-            n = len(value)
-        if n < max_digits - decimal_places:
-            value = u"0" * (max_digits - decimal_places - n) + value
-        return sign + value
-
-    def sql_flush(self, style, tables, sequences):
-        self.connection.flush()
-        return []
-
-class DatabaseClient(NonrelDatabaseClient):
-    pass
-
-class DatabaseValidation(NonrelDatabaseValidation):
-    pass
-
-class DatabaseIntrospection(NonrelDatabaseIntrospection):
-    pass
-
-class DatabaseWrapper(NonrelDatabaseWrapper):
-    def __init__(self, *args, **kwds):
-        super(DatabaseWrapper, self).__init__(*args, **kwds)
-        self.features = DatabaseFeatures(self)
-        self.ops = DatabaseOperations(self)
-        self.client = DatabaseClient(self)
-        self.creation = DatabaseCreation(self)
-        self.validation = DatabaseValidation(self)
-        self.introspection = DatabaseIntrospection(self)
-        options = self.settings_dict
-        self.use_test_datastore = False
-        self.test_datastore_inmemory = True
-        self.remote = options.get('REMOTE', False)
-        if on_production_server:
-            self.remote = False
-        self.remote_app_id = options.get('REMOTE_APP_ID', appid)
-        self.remote_api_path = options.get('REMOTE_API_PATH', None)
-        self.secure_remote_api = options.get('SECURE_REMOTE_API', True)
-        self._setup_stubs()
-
-    def _get_paths(self):
-        if self.use_test_datastore:
-            return get_test_datastore_paths(self.test_datastore_inmemory)
-        else:
-            return get_datastore_paths(self.settings_dict)
-
-    def _setup_stubs(self):
-        # If this code is being run without an appserver (e.g. via a Django
-        # management command) then set up a default stub environment.
-        if not have_appserver:
-            from google.appengine.tools import dev_appserver_main
-            args = dev_appserver_main.DEFAULT_ARGS.copy()
-            args['datastore_path'], args['blobstore_path'], args['history_path'] = self._get_paths()
-            from google.appengine.tools import dev_appserver
-            dev_appserver.SetupStubs(appid, **args)
-        # If we're supposed to set up the remote_api, do that now.
-        if self.remote:
-            self.setup_remote()
-
-    def setup_remote(self):
-        if not self.remote_api_path:
-            from ..utils import appconfig
-            for handler in appconfig.handlers:
-                if handler.script == REMOTE_API_SCRIPT:
-                    self.remote_api_path = handler.url
-                    break
-        self.remote = True
-        remote_url = 'https://%s.appspot.com%s' % (self.remote_app_id,
-                                                   self.remote_api_path)
-        logging.info('Setting up remote_api for "%s" at %s' %
-                     (self.remote_app_id, remote_url))
-        if not have_appserver:
-            print 'Connecting to remote_api handler'
-        from google.appengine.ext.remote_api import remote_api_stub
-        remote_api_stub.ConfigureRemoteApi(self.remote_app_id,
-            self.remote_api_path, auth_func, secure=self.secure_remote_api,
-            rpc_server_factory=rpc_server_factory)
-        retry_delay = 1
-        while retry_delay <= 16:
-            try:
-                remote_api_stub.MaybeInvokeAuthentication()
-            except HTTPError, e:
-                if not have_appserver:
-                    print 'Retrying in %d seconds...' % retry_delay
-                time.sleep(retry_delay)
-                retry_delay *= 2
-            else:
-                break
-        else:
-            try:
-                remote_api_stub.MaybeInvokeAuthentication()
-            except HTTPError, e:
-                raise URLError("%s\n"
-                               "Couldn't reach remote_api handler at %s.\n"
-                               "Make sure you've deployed your project and "
-                               "installed a remote_api handler in app.yaml."
-                               % (e, remote_url))
-        logging.info('Now using the remote datastore for "%s" at %s' %
-                     (self.remote_app_id, remote_url))
-
-    def flush(self):
-        """Helper function to remove the current datastore and re-open the stubs"""
-        if self.remote:
-            import random, string
-            code = ''.join([random.choice(string.ascii_letters) for x in range(4)])
-            print '\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
-            print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
-            print "Warning! You're about to delete the *production* datastore!"
-            print 'Only models defined in your INSTALLED_APPS can be removed!'
-            print 'If you want to clear the whole datastore you have to use the ' \
-                  'datastore viewer in the dashboard. Also, in order to delete all '\
-                  'unneeded indexes you have to run appcfg.py vacuum_indexes.'
-            print 'In order to proceed you have to enter the following code:'
-            print code
-            response = raw_input('Repeat: ')
-            if code == response:
-                print 'Deleting...'
-                from django.db import models
-                from google.appengine.api import datastore as ds
-                for model in models.get_models():
-                    print 'Deleting %s...' % model._meta.db_table
-                    while True:
-                        data = ds.Query(model._meta.db_table, keys_only=True).Get(200)
-                        if not data:
-                            break
-                        ds.Delete(data)
-                print "Datastore flushed! Please check your dashboard's " \
-                      'datastore viewer for any remaining entities and remove ' \
-                      'all unneeded indexes with manage.py vacuum_indexes.'
-            else:
-                print 'Aborting'
-                exit()
-        else:
-            destroy_datastore(*self._get_paths())
-        self._setup_stubs()
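
The REMOTE, REMOTE_APP_ID, REMOTE_API_PATH and SECURE_REMOTE_API options above
are read from the backend's settings_dict. A minimal settings sketch enabling
the remote API (the engine path and app id are assumptions, not taken from
this commit):

    # settings.py -- a sketch only; 'djangoappengine.db' and the app id are
    # assumptions.
    DATABASES = {
        'default': {
            'ENGINE': 'djangoappengine.db',   # the backend packaged here
            'REMOTE': True,                   # route queries through remote_api
            'REMOTE_APP_ID': 'my-app-id',     # defaults to the local app id
            'SECURE_REMOTE_API': True,        # talk to the handler over https
        },
    }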

File db/compiler.py

-from .db_settings import get_model_indexes
-
-import datetime
-import sys
-
-from django.db.models.sql import aggregates as sqlaggregates
-from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
-from django.db.models.sql.where import AND, OR
-from django.db.utils import DatabaseError, IntegrityError
-from django.utils.tree import Node
-
-from functools import wraps
-
-from google.appengine.api.datastore import Entity, Query, MultiQuery, \
-    Put, Get, Delete, Key
-from google.appengine.api.datastore_errors import Error as GAEError
-from google.appengine.api.datastore_types import Text, Category, Email, Link, \
-    PhoneNumber, PostalAddress, Text, Blob, ByteString, GeoPt, IM, Key, \
-    Rating, BlobKey
-
-from djangotoolbox.db.basecompiler import NonrelQuery, NonrelCompiler, \
-    NonrelInsertCompiler, NonrelUpdateCompiler, NonrelDeleteCompiler
-
-import cPickle as pickle
-
-import decimal
-
-# Valid query types (a dictionary is used for speedy lookups).
-OPERATORS_MAP = {
-    'exact': '=',
-    'gt': '>',
-    'gte': '>=',
-    'lt': '<',
-    'lte': '<=',
-
-    # The following operators are supported with special code below:
-    'isnull': None,
-    'in': None,
-    'startswith': None,
-    'range': None,
-    'year': None,
-}
-
-NEGATION_MAP = {
-    'gt': '<=',
-    'gte': '<',
-    'lt': '>=',
-    'lte': '>',
-    # TODO: support these filters
-    #'exact': '!=', # this might actually become individual '<' and '>' queries
-}
-
-def safe_call(func):
-    @wraps(func)
-    def _func(*args, **kwargs):
-        try:
-            return func(*args, **kwargs)
-        except GAEError, e:
-            raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
-    return _func
-
-class GAEQuery(NonrelQuery):
-    # ----------------------------------------------
-    # Public API
-    # ----------------------------------------------
-    def __init__(self, compiler, fields):
-        super(GAEQuery, self).__init__(compiler, fields)
-        self.inequality_field = None
-        self.pk_filters = None
-        self.excluded_pks = ()
-        self.has_negated_exact_filter = False
-        self.ordering = ()
-        self.gae_ordering = []
-        pks_only = False
-        if len(fields) == 1 and fields[0].primary_key:
-            pks_only = True
-        self.db_table = self.query.get_meta().db_table
-        self.pks_only = pks_only
-        start_cursor = getattr(self.query, '_gae_start_cursor', None)
-        end_cursor = getattr(self.query, '_gae_end_cursor', None)
-        self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
-                                cursor=start_cursor, end_cursor=end_cursor)]
-
-    # This is needed for debugging
-    def __repr__(self):
-        return '<GAEQuery: %r ORDER %r>' % (self.gae_query, self.ordering)
-
-    @safe_call
-    def fetch(self, low_mark, high_mark):
-        query = self._build_query()
-        executed = False
-        if self.excluded_pks and high_mark is not None:
-            high_mark += len(self.excluded_pks)
-        if self.pk_filters is not None:
-            results = self.get_matching_pk(low_mark, high_mark)
-        else:
-            if high_mark is None:
-                kw = {}
-                if low_mark:
-                    kw['offset'] = low_mark
-                results = query.Run(**kw)
-                executed = True
-            elif high_mark > low_mark:
-                results = query.Get(high_mark - low_mark, low_mark)
-                executed = True
-            else:
-                results = ()
-
-        for entity in results:
-            if isinstance(entity, Key):
-                key = entity
-            else:
-                key = entity.key()
-            if key in self.excluded_pks:
-                continue
-            yield self._make_entity(entity)
-
-        if executed and not isinstance(query, MultiQuery):
-            self.query._gae_cursor = query.GetCompiledCursor()
-
-    @safe_call
-    def count(self, limit=None):
-        if self.pk_filters is not None:
-            return len(self.get_matching_pk(0, limit))
-        if self.excluded_pks:
-            return len(list(self.fetch(0, 2000)))
-        kw = {}
-        if limit is not None:
-            kw['limit'] = limit
-        return self._build_query().Count(**kw)
-
-    @safe_call
-    def delete(self):
-        if self.pk_filters is not None:
-            keys = [key for key in self.pk_filters if key is not None]
-        else:
-            keys = self.fetch()
-        if keys:
-            Delete(keys)
-
-    @safe_call
-    def order_by(self, ordering):
-        self.ordering = ordering
-        for order in self.ordering:
-            if order.startswith('-'):
-                order, direction = order[1:], Query.DESCENDING
-            else:
-                direction = Query.ASCENDING
-            if order == self.query.get_meta().pk.column:
-                order = '__key__'
-            self.gae_ordering.append((order, direction))
-
-    # This function is used by the default add_filters() implementation
-    @safe_call
-    def add_filter(self, column, lookup_type, negated, db_type, value):
-        if value in ([], ()):
-            self.pk_filters = []
-            return
-
-        # Emulated/converted lookups
-        if column == self.query.get_meta().pk.column:
-            column = '__key__'
-            db_table = self.query.get_meta().db_table
-            if lookup_type in ('exact', 'in'):
-                # Optimization: batch-get by key
-                if self.pk_filters is not None:
-                    raise DatabaseError("You can't apply multiple AND filters "
-                                        "on the primary key. "
-                                        "Did you mean __in=[...]?")
-                if not isinstance(value, (tuple, list)):
-                    value = [value]
-                pks = [create_key(db_table, pk) for pk in value if pk]
-                if negated:
-                    self.excluded_pks = pks
-                else:
-                    self.pk_filters = pks
-                return
-            else:
-                # XXX: set db_type to 'gae_key' in order to allow
-                # convert_value_for_db to recognize the value to be a Key and
-                # not a str. Otherwise the key would be converted back to a
-                # unicode (see convert_value_for_db)
-                db_type = 'gae_key'
-                key_type_error = 'Lookup values on primary keys have to be ' \
-                                 'a string or an integer.'
-                if lookup_type == 'range':
-                    if isinstance(value,(list, tuple)) and not(isinstance(
-                            value[0], (basestring, int, long)) and \
-                            isinstance(value[1], (basestring, int, long))):
-                        raise DatabaseError(key_type_error)
-                elif not isinstance(value,(basestring, int, long)):
-                    raise DatabaseError(key_type_error)
-                # for lookup type range we have to deal with a list
-                if lookup_type == 'range':
-                    value[0] = create_key(db_table, value[0])
-                    value[1] = create_key(db_table, value[1])
-                else:
-                    value = create_key(db_table, value)
-        if lookup_type not in OPERATORS_MAP:
-            raise DatabaseError("Lookup type %r isn't supported" % lookup_type)
-
-        # We check for negation after lookup_type isnull because it
-        # simplifies the code. All following lookup_type checks assume
-        # that they're not negated.
-        if lookup_type == 'isnull':
-            if (negated and value) or not value:
-                # TODO/XXX: is everything greater than None?
-                op = '>'
-            else:
-                op = '='
-            value = None
-        elif negated and lookup_type == 'exact':
-            if self.has_negated_exact_filter:
-                raise DatabaseError("You can't exclude more than one __exact "
-                                    "filter")
-            self.has_negated_exact_filter = True
-            self._combine_filters(column, db_type,
-                                  (('<', value), ('>', value)))
-            return
-        elif negated:
-            try:
-                op = NEGATION_MAP[lookup_type]
-            except KeyError:
-                raise DatabaseError("Lookup type %r can't be negated" % lookup_type)
-            if self.inequality_field and column != self.inequality_field:
-                raise DatabaseError("Can't have inequality filters on multiple "
-                    "columns (here: %r and %r)" % (self.inequality_field, column))
-            self.inequality_field = column
-        elif lookup_type == 'in':
-            # Create sub-query combinations, one for each value
-            if len(self.gae_query) * len(value) > 30:
-                raise DatabaseError("You can't query against more than "
-                                    "30 __in filter value combinations")
-            op_values = [('=', v) for v in value]
-            self._combine_filters(column, db_type, op_values)
-            return
-        elif lookup_type == 'startswith':
-            self._add_filter(column, '>=', db_type, value)
-            if isinstance(value, str):
-                value = value.decode('utf8')
-            if isinstance(value, Key):
-                value = list(value.to_path())
-                if isinstance(value[-1], str):
-                    value[-1] = value[-1].decode('utf8')
-                value[-1] += u'\ufffd'
-                value = Key.from_path(*value)
-            else:
-                value += u'\ufffd'
-            self._add_filter(column, '<=', db_type, value)
-            return
-        elif lookup_type in ('range', 'year'):
-            self._add_filter(column, '>=', db_type, value[0])
-            op = '<=' if lookup_type == 'range' else '<'
-            self._add_filter(column, op, db_type, value[1])
-            return
-        else:
-            op = OPERATORS_MAP[lookup_type]
-
-        self._add_filter(column, op, db_type, value)
-
-    # ----------------------------------------------
-    # Internal API
-    # ----------------------------------------------
-    def _add_filter(self, column, op, db_type, value):
-        for query in self.gae_query:
-            key = '%s %s' % (column, op)
-            value = self.convert_value_for_db(db_type, value)
-            if isinstance(value, Text):
-                raise DatabaseError('TextField is not indexed, by default, '
-                                    "so you can't filter on it. "
-                                    'Please add an index definition for the '
-                                    'column "%s" as described here:\n'
-                                    'http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine'
-                                    % column)
-            if key in query:
-                existing_value = query[key]
-                if isinstance(existing_value, list):
-                    existing_value.append(value)
-                else:
-                    query[key] = [existing_value, value]
-            else:
-                query[key] = value
-
-    def _combine_filters(self, column, db_type, op_values):
-        gae_query = self.gae_query
-        combined = []
-        for query in gae_query:
-            for op, value in op_values:
-                self.gae_query = [Query(self.db_table,
-                                        keys_only=self.pks_only)]
-                self.gae_query[0].update(query)
-                self._add_filter(column, op, db_type, value)
-                combined.append(self.gae_query[0])
-        self.gae_query = combined
-
-    def _make_entity(self, entity):
-        if isinstance(entity, Key):
-            key = entity
-            entity = {}
-        else:
-            key = entity.key()
-
-        entity[self.query.get_meta().pk.column] = key
-        return entity
-
-    @safe_call
-    def _build_query(self):
-        if len(self.gae_query) > 1:
-            return MultiQuery(self.gae_query, self.gae_ordering)
-        query = self.gae_query[0]
-        query.Order(*self.gae_ordering)
-        return query
-
-    def get_matching_pk(self, low_mark=0, high_mark=None):
-        if not self.pk_filters:
-            return []
-
-        results = [result for result in Get(self.pk_filters)
-                   if result is not None and
-                       self.matches_filters(result)]
-        if self.ordering:
-            results.sort(cmp=self.order_pk_filtered)
-        if high_mark is not None and high_mark < len(results) - 1:
-            results = results[:high_mark]
-        if low_mark:
-            results = results[low_mark:]
-        return results
-
-    def order_pk_filtered(self, lhs, rhs):
-        left = dict(lhs)
-        left[self.query.get_meta().pk.column] = lhs.key().to_path()
-        right = dict(rhs)
-        right[self.query.get_meta().pk.column] = rhs.key().to_path()
-        return self._order_in_memory(left, right)
-
-    def matches_filters(self, entity):
-        item = dict(entity)
-        pk = self.query.get_meta().pk
-        value = self.convert_value_from_db(pk.db_type(connection=self.connection),
-            entity.key())
-        item[pk.column] = value
-        result = self._matches_filters(item, self.query.where)
-        return result
-
-class SQLCompiler(NonrelCompiler):
-    """
-    A simple App Engine query: no joins, no distinct, etc.
-    """
-    query_class = GAEQuery
-
-    def convert_value_from_db(self, db_type, value):
-        if isinstance(value, (list, tuple, set)) and \
-                db_type.startswith(('ListField:', 'SetField:')):
-            db_sub_type = db_type.split(':', 1)[1]
-            value = [self.convert_value_from_db(db_sub_type, subvalue)
-                     for subvalue in value]
-
-        if db_type.startswith('SetField:') and value is not None:
-            value = set(value)
-
-        if db_type.startswith('DictField:') and value is not None:
-            value = pickle.loads(value)
-            if ':' in db_type:
-                db_sub_type = db_type.split(':', 1)[1]
-                value = dict((key, self.convert_value_from_db(db_sub_type, value[key]))
-                             for key in value)
-
-        # The following GAE database types are all unicode subclasses; cast
-        # them to unicode so they appear as plain unicode instances to Django.
-        if isinstance(value, basestring) and value and db_type.startswith('decimal'):
-            value = decimal.Decimal(value)
-        elif isinstance(value, (Category, Email, Link, PhoneNumber, PostalAddress,
-                Text, unicode)):
-            value = unicode(value)
-        elif isinstance(value, Blob):
-            value = str(value)
-        elif isinstance(value, str):
-            # Always retrieve strings as unicode (old datasets may contain
-            # non-unicode strings, but we work with unicode throughout).
-            value = value.decode('utf-8')
-        elif isinstance(value, Key):
-            # For now we do not support KeyFields, so a Key has to be the
-            # model's own primary key.
-            # TODO: GAE: support parents via GAEKeyField
-            assert value.parent() is None, "Parents are not yet supported!"
-            if db_type == 'integer':
-                if value.id() is None:
-                    raise DatabaseError('Wrong type for Key. Expected integer, '
-                                        'found None')
-                else:
-                    value = value.id()
-            elif db_type == 'text':
-                if value.name() is None:
-                    raise DatabaseError('Wrong type for Key. Expected string, '
-                                        'found None')
-                else:
-                    value = value.name()
-            else:
-                raise DatabaseError("%s fields cannot be keys on GAE" % db_type)
-        elif db_type == 'date' and isinstance(value, datetime.datetime):
-            value = value.date()
-        elif db_type == 'time' and isinstance(value, datetime.datetime):
-            value = value.time()
-        return value
-
-    def convert_value_for_db(self, db_type, value):
-        if isinstance(value, unicode):
-            value = unicode(value)
-        elif isinstance(value, str):
-            value = str(value)
-        elif isinstance(value, (list, tuple, set)) and \
-                db_type.startswith(('ListField:', 'SetField:')):
-            db_sub_type = db_type.split(':', 1)[1]
-            value = [self.convert_value_for_db(db_sub_type, subvalue)
-                     for subvalue in value]
-        elif isinstance(value, decimal.Decimal) and db_type.startswith("decimal:"):
-            value = self.connection.ops.value_to_db_decimal(value, *eval(db_type[8:]))
-        elif isinstance(value, dict) and db_type.startswith('DictField:'):
-            if ':' in db_type:
-                db_sub_type = db_type.split(':', 1)[1]
-                value = dict([(key, self.convert_value_for_db(db_sub_type, value[key]))
-                              for key in value])
-            value = Blob(pickle.dumps(value))
-
-        if db_type == 'gae_key':
-            return value
-        elif db_type == 'longtext':
-            # long text fields cannot be indexed on GAE so use GAE's database
-            # type Text
-            value = Text((isinstance(value, str) and value.decode('utf-8')) or value)
-        elif db_type == 'text':
-            value = (isinstance(value, str) and value.decode('utf-8')) or value
-        elif db_type == 'blob':
-            value = Blob(value)
-        elif type(value) is str:
-            # always store unicode strings
-            value = value.decode('utf-8')
-        elif db_type == 'date' or db_type == 'time' or db_type == 'datetime':
-            # here we have to check the db_type because GAE always stores datetimes
-            value = to_datetime(value)
-        return value
-
-class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
-    @safe_call
-    def insert(self, data, return_id=False):
-        gae_data = {}
-        opts = self.query.get_meta()
-        unindexed_fields = get_model_indexes(self.query.model)['unindexed']
-        unindexed_cols = [opts.get_field(name).column
-                          for name in unindexed_fields]
-        kwds = {'unindexed_properties': unindexed_cols}
-        for column, value in data.items():
-            if column == opts.pk.column:
-                if isinstance(value, basestring):
-                    kwds['name'] = value
-                else:
-                    kwds['id'] = value
-            elif isinstance(value, (tuple, list)) and not len(value):
-                # GAE does not store empty lists (and does not even allow
-                # passing empty lists to Entity.update), so skip them.
-                continue
-            else:
-                gae_data[column] = value
-
-        entity = Entity(self.query.get_meta().db_table, **kwds)
-        entity.update(gae_data)
-        key = Put(entity)
-        return key.id_or_name()
-
-class SQLUpdateCompiler(NonrelUpdateCompiler, SQLCompiler):
-    pass
-
-class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
-    pass
-
-def to_datetime(value):
-    """Convert a time or date to a datetime for datastore storage.
-
-    Args:
-    value: A datetime.time, datetime.date or string object.
-
-    Returns:
-    A datetime object with the date set to 1970-01-01 if value is a
-    datetime.time, or a datetime object keeping value's year, month and day
-    with the time set to 0:00 if value is a datetime.date.
-    """
-
-    if value is None:
-        return value
-    elif isinstance(value, datetime.datetime):
-        return value
-    elif isinstance(value, datetime.date):
-        return datetime.datetime(value.year, value.month, value.day)
-    elif isinstance(value, datetime.time):
-        return datetime.datetime(1970, 1, 1, value.hour, value.minute,
-            value.second, value.microsecond)
-
-def create_key(db_table, value):
-    if isinstance(value, (int, long)) and value < 1:
-        return None
-    return Key.from_path(db_table, value)
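
The startswith branch of add_filter above emulates a prefix filter with two
inequalities. A standalone sketch of the same trick (the function name is
illustrative):

    # -*- coding: utf-8 -*-
    # 'col startswith v' becomes 'col >= v AND col <= v + u"\ufffd"';
    # u'\ufffd' acts as a practical upper bound for indexed strings.
    def prefix_bounds(value):
        if isinstance(value, str):
            value = value.decode('utf8')
        return value, value + u'\ufffd'

    low, high = prefix_bounds('abc')
    assert low <= u'abcdef' <= high      # shares the prefix
    assert not (low <= u'abd' <= high)   # does not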

File db/creation.py

-from .db_settings import get_model_indexes
-from djangotoolbox.db.creation import NonrelDatabaseCreation
-
-class StringType(object):
-    def __init__(self, internal_type):
-        self.internal_type = internal_type
-
-    def __mod__(self, field):
-        indexes = get_model_indexes(field['model'])
-        if field['name'] in indexes['indexed']:
-            return 'text'
-        elif field['name'] in indexes['unindexed']:
-            return 'longtext'
-        return self.internal_type
-
-def get_data_types():
-    # TODO: Add GAEKeyField and a corresponding db_type
-    string_types = ('text', 'longtext')
-    data_types = NonrelDatabaseCreation.data_types.copy()
-    for name, field_type in data_types.items():
-        if field_type in string_types:
-            data_types[name] = StringType(field_type)
-    return data_types
-
-class DatabaseCreation(NonrelDatabaseCreation):
-    # This dictionary maps Field objects to their associated GAE column
-    # types, as strings. Column-type strings can contain format strings; they'll
-    # be interpolated against the values of Field.__dict__ before being output.
-    # If a column type is set to None, it won't be included in the output.
-
-    data_types = get_data_types()
-
-    def create_test_db(self, *args, **kw):
-        """Destroys the test datastore. A new store will be recreated on demand"""
-        self.destroy_test_db()
-        self.connection.use_test_datastore = True
-        self.connection.flush()
-
-    def destroy_test_db(self, *args, **kw):
-        """Destroys the test datastore files."""
-        from .base import destroy_datastore, get_test_datastore_paths
-        destroy_datastore(*get_test_datastore_paths())
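
The StringType trick above works because Django interpolates each column-type
entry with the '%' operator; overriding __mod__ lets the backend choose 'text'
or 'longtext' per field at that moment. A toy sketch of the hook (the field
name is an assumption):

    # Django applies '%' to each column-type entry, so __mod__ can pick the
    # type per field; 'body' below is illustrative only.
    class PerFieldType(object):
        def __init__(self, default):
            self.default = default

        def __mod__(self, field_data):
            if field_data.get('name') == 'body':
                return 'longtext'
            return self.default

    print PerFieldType('text') % {'name': 'body'}   # -> 'longtext'
    print PerFieldType('text') % {'name': 'title'}  # -> 'text'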

File db/db_settings.py

-from django.conf import settings
-from django.utils.importlib import import_module
-
-# TODO: add autodiscover() and make API more like dbindexer's register_index
-
-_MODULE_NAMES = getattr(settings, 'GAE_SETTINGS_MODULES', ())
-
-FIELD_INDEXES = None
-
-# TODO: add support for eventual consistency setting on specific models
-
-def get_model_indexes(model):
-    indexes = get_indexes()
-    model_index = {'indexed': [], 'unindexed': []}
-    for item in reversed(model.mro()):
-        config = indexes.get(item, {})
-        model_index['indexed'].extend(config.get('indexed', ()))
-        model_index['unindexed'].extend(config.get('unindexed', ()))
-    return model_index
-
-def get_indexes():
-    global FIELD_INDEXES
-    if FIELD_INDEXES is None:
-        field_indexes = {}
-        for name in _MODULE_NAMES:
-            field_indexes.update(import_module(name).FIELD_INDEXES)
-        FIELD_INDEXES = field_indexes
-    return FIELD_INDEXES
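
get_indexes() merges the FIELD_INDEXES dictionaries of every module listed in
settings.GAE_SETTINGS_MODULES. A minimal sketch of such a module (the module,
model and field names are assumptions):

    # gae_db_field_indexes.py -- listed in settings as
    # GAE_SETTINGS_MODULES = ('gae_db_field_indexes',)
    from myapp.models import Post

    FIELD_INDEXES = {
        Post: {
            'indexed': ['title'],    # stored as 'text': indexed, filterable
            'unindexed': ['body'],   # stored as 'longtext' (Text): unindexed
        },
    }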

File db/utils.py

-from google.appengine.datastore.datastore_query import Cursor
-
-def get_cursor(queryset):
-    # Evaluate QuerySet
-    len(queryset)
-    cursor = getattr(queryset.query, '_gae_cursor', None)
-    return Cursor.to_websafe_string(cursor)
-
-def set_cursor(queryset, start=None, end=None):
-    if start is not None:
-        start = Cursor.from_websafe_string(start)
-        queryset.query._gae_start_cursor = start
-    if end is not None:
-        end = Cursor.from_websafe_string(end)
-        queryset.query._gae_end_cursor = end
-    # Evaluate QuerySet
-    len(queryset)
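
For context, these helpers enable cursor-based pagination across requests; a
usage sketch (the model and page size are assumptions):

    # Fetch a page, remember where it ended, and resume from there later.
    from djangoappengine.db.utils import get_cursor, set_cursor
    from myapp.models import Post

    page = Post.objects.all()[:20]
    bookmark = get_cursor(page)            # web-safe string, e.g. for a URL

    next_page = Post.objects.all()[:20]
    set_cursor(next_page, start=bookmark)  # continue after the last page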

File dbindexes.py

-from django.conf import settings
-
-if 'django.contrib.auth' in settings.INSTALLED_APPS:
-    from dbindexer.api import register_index
-    from django.contrib.auth.models import User
-
-    register_index(User, {
-        'username': 'iexact',
-        'email': 'iexact',
-    })
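
With those indexes registered (dbindexer installed and this module imported at
startup), case-insensitive lookups on User work; a usage sketch (the username
is an assumption):

    from django.contrib.auth.models import User

    # iexact is rewritten by dbindexer against the registered index.
    user = User.objects.get(username__iexact='Admin')  # also matches 'admin'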

File deferred/__init__.py

Empty file removed.

File deferred/handler.py

-import os, sys
-
-parent_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
-if parent_dir not in sys.path:
-    sys.path.insert(0, parent_dir)
-
-# Initialize Django
-from djangoappengine.main import main as gaemain
-
-# Import and run the actual handler
-from google.appengine.ext.deferred.handler import main
-if __name__ == '__main__':
-    main()
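
This script is intended to be mapped to the deferred task queue URL in
app.yaml; a sketch of such a handler entry (the script path depends on the
project layout):

    handlers:
    - url: /_ah/queue/deferred
      script: djangoappengine/deferred/handler.py
      login: admin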

File djangoappengine/__init__.py

Empty file added.

File djangoappengine/boot.py

+import logging
+import os
+import sys
+
+if "GOOGLE_APPENGINE_PROJECT_ROOT" in os.environ:
+        # Read location of App Engine .yaml files from OS environment
+        PROJECT_DIR = os.environ["GOOGLE_APPENGINE_PROJECT_ROOT"]
+else:
+        # Default to the location of this file
+        PROJECT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+
+# Overrides for os.environ
+env_ext = {'DJANGO_SETTINGS_MODULE': 'settings'}
+
+def setup_env():
+    """Configures app engine environment for command-line apps."""
+    # Try to import the appengine code from the system path.
+    try:
+        from google.appengine.api import apiproxy_stub_map
+    except ImportError:
+        for k in [k for k in sys.modules if k.startswith('google')]:
+            del sys.modules[k]
+
+        # Not on the system path. Build a list of alternative paths where it
+        # may be. First look within the project for a local copy, then look for
+        # where the Mac OS SDK installs it.
+        paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
+                 os.environ.get('APP_ENGINE_SDK'),
+                 '/usr/local/google_appengine',
+                 '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
+        for path in os.environ.get('PATH', '').split(os.pathsep):
+            path = path.rstrip(os.sep)
+            if path.endswith('google_appengine'):
+                paths.append(path)
+        if os.name in ('nt', 'dos'):
+            path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
+            paths.append(path)
+        # Loop through all possible paths and look for the SDK dir.
+        sdk_path = None
+        for path in paths:
+            if not path:
+                continue
+            path = os.path.expanduser(path)
+            path = os.path.realpath(path)
+            if os.path.exists(path):
+                sdk_path = path
+                break
+        if sdk_path is None:
+            # The SDK could not be found in any known location.
+            sys.stderr.write('The Google App Engine SDK could not be found!\n'
+                             "Make sure it's accessible via your PATH "
+                             "environment and called google_appengine.\n")
+            sys.exit(1)
+        # Add the SDK and the libraries within it to the system path.
+        extra_paths = [sdk_path]
+        lib = os.path.join(sdk_path, 'lib')
+        # Automatically add all packages in the SDK's lib folder:
+        for dir in os.listdir(lib):
+            path = os.path.join(lib, dir)
+            # Package can be under 'lib/<pkg>/<pkg>/' or 'lib/<pkg>/lib/<pkg>/'
+            detect = (os.path.join(path, dir), os.path.join(path, 'lib', dir))
+            for path in detect:
+                if os.path.isdir(path) and not dir == 'django':
+                    extra_paths.append(os.path.dirname(path))
+                    break
+        sys.path = extra_paths + sys.path
+        from google.appengine.api import apiproxy_stub_map
+    
+    setup_project()
+    from .utils import have_appserver
+    if have_appserver:
+        # App Engine's threading.local is broken
+        setup_threading()
+    setup_logging()
+
+    if not have_appserver:
+        # Patch Django to support loading management commands from zip files
+        from django.core import management
+        management.find_commands = find_commands
+
+def find_commands(management_dir):
+    """
+    Given a path to a management directory, returns a list of all the command
+    names that are available.
+    This version works for django deployments which are file based or
+    contained in a ZIP (in sys.path).
+
+    Returns an empty list if no commands are defined.
+    """
+    import pkgutil
+    return [modname for importer, modname, ispkg in pkgutil.iter_modules(
+                [os.path.join(management_dir, 'commands')]) if not ispkg]
+
+def setup_threading():
+    # XXX: GAE's threading.local doesn't work correctly with subclassing
+    try:
+        from django.utils._threading_local import local
+        import threading
+        threading.local = local
+    except ImportError:
+        pass
+
+def setup_logging():
+    # Fix Python 2.6 logging module
+    logging.logMultiprocessing = 0
+
+    # Enable logging
+    level = logging.DEBUG
+    from .utils import have_appserver
+    if have_appserver:
+        # We can't import settings at this point when running a normal
+        # manage.py command because this module gets imported from settings.py
+        from django.conf import settings
+        if not settings.DEBUG:
+            level = logging.INFO
+    logging.getLogger().setLevel(level)
+
+def setup_project():
+    from .utils import have_appserver, on_production_server
+    if have_appserver:
+        # This fixes a pwd import bug for os.path.expanduser()
+        env_ext['HOME'] = PROJECT_DIR
+
+    # The dev_appserver creates a sandbox which restricts access to certain
+    # modules and builtins in order to emulate the production environment.
+    # Here we get the subprocess module back into the dev_appserver sandbox.
+    # This module is just too important for development.
+    # Also we add the compiler/parser modules back and enable https connections
+    # (these seem to be broken on Windows because the _ssl module is disallowed).
+    if not have_appserver:
+        from google.appengine.tools import dev_appserver
+        try:
+            # Backup os.environ. It gets overwritten by the dev_appserver,
+            # but it's needed by the subprocess module.
+            env = dev_appserver.DEFAULT_ENV
+            dev_appserver.DEFAULT_ENV = os.environ.copy()
+            dev_appserver.DEFAULT_ENV.update(env)
+            # Backup the buffer() builtin. The subprocess module in Python 2.5
+            # on Linux and OS X needs it, but the dev_appserver removes it.
+            dev_appserver.buffer = buffer
+        except AttributeError:
+            logging.warn('Could not patch the default environment. '
+                         'The subprocess module will not work correctly.')
+
+        try:
+            # Allow importing compiler/parser and _ssl modules (for https)
+            dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
+                ('parser', '_ssl'))
+        except AttributeError:
+            logging.warn('Could not patch modules whitelist. '
+                         'The compiler and parser modules will not work and '
+                         'SSL support is disabled.')
+    elif not on_production_server:
+        try:
+            # Restore the real subprocess module
+            from google.appengine.api.mail_stub import subprocess
+            sys.modules['subprocess'] = subprocess
+            # Re-inject the buffer() builtin into the subprocess module
+            from google.appengine.tools import dev_appserver
+            subprocess.buffer = dev_appserver.buffer
+        except Exception, e:
+            logging.warn('Could not add the subprocess module to the sandbox: %s' % e)
+
+    os.environ.update(env_ext)
+
+    extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
+    zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')
+
+    # We support zipped packages in the common and project folders.
+    if os.path.isdir(zip_packages_dir):
+        for zip_package in os.listdir(zip_packages_dir):
+            extra_paths.append(os.path.join(zip_packages_dir, zip_package))
+
+    # App Engine causes main.py to be reloaded if an exception gets raised
+    # on the first request of a main.py instance, so don't call setup_project()
+    # multiple times. We ensure this indirectly by checking whether we've
+    # already modified sys.path.
+    if len(sys.path) < len(extra_paths) or \
+            sys.path[:len(extra_paths)] != extra_paths:
+        for path in extra_paths:
+            while path in sys.path:
+                sys.path.remove(path)
+        sys.path = extra_paths + sys.path
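
With the code now packaged under djangoappengine, a project bootstraps it from
its own manage.py before any Django imports; a minimal sketch (the settings
module name is an assumption):

    # manage.py -- setup_env() must run first so the SDK and the project's
    # libraries are on sys.path before Django is imported.
    from djangoappengine.boot import setup_env
    setup_env()

    from django.core.management import execute_manager
    import settings

    if __name__ == '__main__':
        execute_manager(settings)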

File djangoappengine/db/__init__.py

Empty file added.

File djangoappengine/db/base.py

+from ..utils import appid, have_appserver, on_production_server
+from .creation import DatabaseCreation
+from django.db.backends.util import format_number
+from djangotoolbox.db.base import NonrelDatabaseFeatures, \
+    NonrelDatabaseOperations, NonrelDatabaseWrapper, NonrelDatabaseClient, \
+    NonrelDatabaseValidation, NonrelDatabaseIntrospection
+from urllib2 import HTTPError, URLError
+import logging
+import os
+import time
+
+REMOTE_API_SCRIPT = '$PYTHON_LIB/google/appengine/ext/remote_api/handler.py'
+
+def auth_func():
+    import getpass
+    return raw_input('Login via Google Account: '), getpass.getpass('Password: ')
+
+def rpc_server_factory(*args, ** kwargs):
+    from google.appengine.tools import appengine_rpc
+    kwargs['save_cookies'] = True
+    return appengine_rpc.HttpRpcServer(*args, ** kwargs)
+
+def get_datastore_paths(options):
+    """Returns a tuple with the paths to the datastore, blobstore and
+    history files.
+
+    The datastore is stored in the same location as dev_appserver uses by
+    default, but the name is altered to be unique to this project so multiple
+    Django projects can be developed on the same machine in parallel.
+
+    Returns:
+      (datastore_path, blobstore_path, history_path)
+    """
+    from google.appengine.tools import dev_appserver_main
+    datastore_path = options.get('datastore_path',
+                                 dev_appserver_main.DEFAULT_ARGS['datastore_path'].replace(
+                                 'dev_appserver', 'django_%s' % appid))
+    blobstore_path = options.get('blobstore_path',
+                                 dev_appserver_main.DEFAULT_ARGS['blobstore_path'].replace(
+                                 'dev_appserver', 'django_%s' % appid))
+    history_path = options.get('history_path',
+                               dev_appserver_main.DEFAULT_ARGS['history_path'].replace(
+                               'dev_appserver', 'django_%s' % appid))
+    return datastore_path, blobstore_path, history_path
+
+def get_test_datastore_paths(inmemory=True):
+    """Returns a tuple with the paths to the test datastore, blobstore and
+    history files.
+
+    If inmemory is true, (None, None, None) is returned to request an
+    in-memory datastore. If inmemory is false, the paths returned are similar
+    to those returned by get_datastore_paths, but with different names.
+
+    Returns:
+      (datastore_path, blobstore_path, history_path)
+    """
+    if inmemory:
+        return None, None, None
+    datastore_path, blobstore_path, history_path = get_datastore_paths({})
+    datastore_path = datastore_path.replace('.datastore', '.testdatastore')
+    blobstore_path = blobstore_path.replace('.blobstore', '.testblobstore')
+    history_path = history_path.replace('.datastore', '.testdatastore')
+    return datastore_path, blobstore_path, history_path
+
+def destroy_datastore(*args):
+    """Destroys the appengine datastore at the specified paths."""
+    for path in args:
+        if not path:
+            continue
+        try:
+            os.remove(path)
+        except OSError, error:
+            if error.errno != 2:
+                logging.error("Failed to clear datastore: %s" % error)
+
+class DatabaseFeatures(NonrelDatabaseFeatures):
+    allows_primary_key_0 = True
+    supports_dicts = True
+
+class DatabaseOperations(NonrelDatabaseOperations):
+    compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'
+
+    DEFAULT_MAX_DIGITS = 16
+    def value_to_db_decimal(self, value, max_digits, decimal_places):
+        if value is None: 
+            return None
+        sign = value < 0 and u'-' or u''
+        if sign: 
+            value = abs(value)
+        if max_digits is None: 
+            max_digits = self.DEFAULT_MAX_DIGITS
+
+        if decimal_places is None:
+            value = unicode(value)
+        else:
+            value = format_number(value, max_digits, decimal_places)
+        decimal_places = decimal_places or 0
+        n = value.find('.')
+
+        if n < 0:
+            n = len(value)
+        if n < max_digits - decimal_places:
+            value = u"0" * (max_digits - decimal_places - n) + value
+        return sign + value
+
+    def sql_flush(self, style, tables, sequences):
+        self.connection.flush()
+        return []
+
+class DatabaseClient(NonrelDatabaseClient):
+    pass
+
+class DatabaseValidation(NonrelDatabaseValidation):
+    pass
+
+class DatabaseIntrospection(NonrelDatabaseIntrospection):
+    pass
+
+class DatabaseWrapper(NonrelDatabaseWrapper):
+    def __init__(self, *args, **kwds):
+        super(DatabaseWrapper, self).__init__(*args, **kwds)
+        self.features = DatabaseFeatures(self)
+        self.ops = DatabaseOperations(self)
+        self.client = DatabaseClient(self)
+        self.creation = DatabaseCreation(self)
+        self.validation = DatabaseValidation(self)
+        self.introspection = DatabaseIntrospection(self)
+        options = self.settings_dict
+        self.use_test_datastore = False
+        self.test_datastore_inmemory = True
+        self.remote = options.get('REMOTE', False)
+        if on_production_server:
+            self.remote = False
+        self.remote_app_id = options.get('REMOTE_APP_ID', appid)
+        self.remote_api_path = options.get('REMOTE_API_PATH', None)
+        self.secure_remote_api = options.get('SECURE_REMOTE_API', True)
+        self._setup_stubs()
+
+    def _get_paths(self):
+        if self.use_test_datastore:
+            return get_test_datastore_paths(self.test_datastore_inmemory)
+        else:
+            return get_datastore_paths(self.settings_dict)
+
+    def _setup_stubs(self):
+        # If this code is being run without an appserver (e.g. via a Django
+        # command-line flag), set up a default stub environment.
+        if not have_appserver:
+            from google.appengine.tools import dev_appserver_main
+            args = dev_appserver_main.DEFAULT_ARGS.copy()
+            args['datastore_path'], args['blobstore_path'], args['history_path'] = self._get_paths()
+            from google.appengine.tools import dev_appserver
+            dev_appserver.SetupStubs(appid, **args)
+        # If we're supposed to set up the remote_api, do that now.
+        if self.remote:
+            self.setup_remote()
+
+    def setup_remote(self):
+        if not self.remote_api_path:
+            from ..utils import appconfig
+            for handler in appconfig.handlers:
+                if handler.script == REMOTE_API_SCRIPT:
+                    self.remote_api_path = handler.url
+                    break
+        self.remote = True
+        remote_url = 'https://%s.appspot.com%s' % (self.remote_app_id,
+                                                   self.remote_api_path)
+        logging.info('Setting up remote_api for "%s" at %s' %
+                     (self.remote_app_id, remote_url))
+        if not have_appserver:
+            print 'Connecting to remote_api handler'
+        from google.appengine.ext.remote_api import remote_api_stub
+        remote_api_stub.ConfigureRemoteApi(self.remote_app_id,
+            self.remote_api_path, auth_func, secure=self.secure_remote_api,
+            rpc_server_factory=rpc_server_factory)
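+        # Probe the connection with exponential backoff (1, 2, 4, 8, 16 s);
+        # if every retry fails, try once more and raise a helpful URLError.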
+        retry_delay = 1
+        while retry_delay <= 16:
+            try:
+                remote_api_stub.MaybeInvokeAuthentication()
+            except HTTPError, e:
+                if not have_appserver:
+                    print 'Retrying in %d seconds...' % retry_delay
+                time.sleep(retry_delay)
+                retry_delay *= 2
+            else:
+                break
+        else:
+            try:
+                remote_api_stub.MaybeInvokeAuthentication()
+            except HTTPError, e:
+                raise URLError("%s\n"
+                               "Couldn't reach remote_api handler at %s.\n"
+                               "Make sure you've deployed your project and "
+                               "installed a remote_api handler in app.yaml."
+                               % (e, remote_url))
+        logging.info('Now using the remote datastore for "%s" at %s' %
+                     (self.remote_app_id, remote_url))
+
+    def flush(self):
+        """Helper function to remove the current datastore and re-open the stubs"""
+        if self.remote:
+            import random, string
+            code = ''.join([random.choice(string.ascii_letters) for x in range(4)])
+            print '\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
+            print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
+            print "Warning! You're about to delete the *production* datastore!"
+            print 'Only models defined in your INSTALLED_APPS can be removed!'
+            print 'If you want to clear the whole datastore you have to use the ' \
+                  'datastore viewer in the dashboard. Also, in order to delete all '\
+                  'unneeded indexes you have to run appcfg.py vacuum_indexes.'
+            print 'In order to proceed you have to enter the following code:'
+            print code
+            response = raw_input('Repeat: ')
+            if code == response:
+                print 'Deleting...'
+                from django.db import models
+                from google.appengine.api import datastore as ds
+                for model in models.get_models():
+                    print 'Deleting %s...' % model._meta.db_table
+                    while True:
+                        data = ds.Query(model._meta.db_table, keys_only=True).Get(200)
+                        if not data:
+                            break
+                        ds.Delete(data)
+                print "Datastore flushed! Please check your dashboard's " \
+                      'datastore viewer for any remaining entities and remove ' \
+                      'all unneeded indexes with manage.py vacuum_indexes.'
+            else:
+                print 'Aborting'
+                exit()
+        else:
+            destroy_datastore(*self._get_paths())
+        self._setup_stubs()
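
The options this wrapper reads from settings_dict correspond to keys in the
project's DATABASES setting. A minimal sketch (the ENGINE path and the app id
are assumptions, not part of this diff):

    DATABASES = {
        'default': {
            'ENGINE': 'djangoappengine.db',    # assumed backend import path
            'REMOTE': True,                    # use remote_api; forced off on the production server
            'REMOTE_APP_ID': 'example-app',    # hypothetical; defaults to the app id from app.yaml
            'REMOTE_API_PATH': '/remote_api',  # optional; otherwise discovered from app.yaml handlers
            'SECURE_REMOTE_API': True,         # connect to remote_api over https
        },
    }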

File djangoappengine/db/compiler.py

+from .db_settings import get_model_indexes
+
+import datetime
+import sys
+
+from django.db.models.sql import aggregates as sqlaggregates
+from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
+from django.db.models.sql.where import AND, OR
+from django.db.utils import DatabaseError, IntegrityError
+from django.utils.tree import Node
+
+from functools import wraps
+
+from google.appengine.api.datastore import Entity, Query, MultiQuery, \
+    Put, Get, Delete, Key
+from google.appengine.api.datastore_errors import Error as GAEError
+from google.appengine.api.datastore_types import Text, Category, Email, Link, \
+    PhoneNumber, PostalAddress, Blob, ByteString, GeoPt, IM, \
+    Rating, BlobKey
+
+from djangotoolbox.db.basecompiler import NonrelQuery, NonrelCompiler, \
+    NonrelInsertCompiler, NonrelUpdateCompiler, NonrelDeleteCompiler
+
+import cPickle as pickle
+
+import decimal
+
+# Valid query types (a dictionary is used for speedy lookups).
+OPERATORS_MAP = {
+    'exact': '=',
+    'gt': '>',
+    'gte': '>=',
+    'lt': '<',
+    'lte': '<=',
+
+    # The following operators are supported with special code below:
+    'isnull': None,
+    'in': None,
+    'startswith': None,
+    'range': None,
+    'year': None,
+}
+
+NEGATION_MAP = {
+    'gt': '<=',
+    'gte': '<',
+    'lt': '>=',
+    'lte': '>',
+    # TODO: support these filters
+    #'exact': '!=', # this might actually become individual '<' and '>' queries
+}
+
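+# Decorator that converts GAE datastore errors into Django's DatabaseError,
+# re-raising with the original traceback (Python 2 three-argument raise).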
+def safe_call(func):
+    @wraps(func)
+    def _func(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except GAEError, e:
+            raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
+    return _func
+
+class GAEQuery(NonrelQuery):
+    # ----------------------------------------------
+    # Public API
+    # ----------------------------------------------
+    def __init__(self, compiler, fields):
+        super(GAEQuery, self).__init__(compiler, fields)
+        self.inequality_field = None
+        self.pk_filters = None
+        self.excluded_pks = ()
+        self.has_negated_exact_filter = False
+        self.ordering = ()
+        self.gae_ordering = []
+        pks_only = False
+        if len(fields) == 1 and fields[0].primary_key:
+            pks_only = True
+        self.db_table = self.query.get_meta().db_table
+        self.pks_only = pks_only
+        start_cursor = getattr(self.query, '_gae_start_cursor', None)
+        end_cursor = getattr(self.query, '_gae_end_cursor', None)
+        self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
+                                cursor=start_cursor, end_cursor=end_cursor)]
+
+    # This is needed for debugging
+    def __repr__(self):
+        return '<GAEQuery: %r ORDER %r>' % (self.gae_query, self.ordering)
+
+    @safe_call
+    def fetch(self, low_mark=0, high_mark=None):
+        query = self._build_query()
+        executed = False
+        if self.excluded_pks and high_mark is not None:
+            high_mark += len(self.excluded_pks)
+        if self.pk_filters is not None:
+            results = self.get_matching_pk(low_mark, high_mark)
+        else:
+            if high_mark is None:
+                kw = {}
+                if low_mark:
+                    kw['offset'] = low_mark
+                results = query.Run(**kw)
+                executed = True
+            elif high_mark > low_mark:
+                results = query.Get(high_mark - low_mark, low_mark)
+                executed = True
+            else:
+                results = ()
+
+        for entity in results:
+            if isinstance(entity, Key):
+                key = entity
+            else:
+                key = entity.key()
+            if key in self.excluded_pks:
+                continue
+            yield self._make_entity(entity)
+
+        if executed and not isinstance(query, MultiQuery):
+            self.query._gae_cursor = query.GetCompiledCursor()
+
+    @safe_call
+    def count(self, limit=None):
+        if self.pk_filters is not None:
+            return len(self.get_matching_pk(0, limit))
+        if self.excluded_pks:
+            return len(list(self.fetch(0, 2000)))
+        kw = {}
+        if limit is not None:
+            kw['limit'] = limit
+        return self._build_query().Count(**kw)
+
+    @safe_call
+    def delete(self):
+        if self.pk_filters is not None:
+            keys = [key for key in self.pk_filters if key is not None]
+        else:
+            keys = self.fetch()
+        if keys:
+            Delete(keys)
+
+    @safe_call
+    def order_by(self, ordering):
+        self.ordering = ordering
+        for order in self.ordering:
+            if order.startswith('-'):
+                order, direction = order[1:], Query.DESCENDING
+            else:
+                direction = Query.ASCENDING
+            if order == self.query.get_meta().pk.column:
+                order = '__key__'
+            self.gae_ordering.append((order, direction))
+
+    # This function is used by the default add_filters() implementation
+    @safe_call
+    def add_filter(self, column, lookup_type, negated, db_type, value):
+        if value in ([], ()):
+            self.pk_filters = []
+            return
+
+        # Emulated/converted lookups
+        if column == self.query.get_meta().pk.column:
+            column = '__key__'
+            db_table = self.query.get_meta().db_table
+            if lookup_type in ('exact', 'in'):
+                # Optimization: batch-get by key
+                if self.pk_filters is not None:
+                    raise DatabaseError("You can't apply multiple AND filters "
+                                        "on the primary key. "
+                                        "Did you mean __in=[...]?")
+                if not isinstance(value, (tuple, list)):
+                    value = [value]
+                pks = [create_key(db_table, pk) for pk in value if pk]
+                if negated:
+                    self.excluded_pks = pks
+                else:
+                    self.pk_filters = pks
+                return
+            else:
+                # XXX: set db_type to 'gae_key' so that convert_value_for_db
+                # recognizes the value as a Key and not a str; otherwise the
+                # key would be converted back to unicode (see
+                # convert_value_for_db).
+                db_type = 'gae_key'
+                key_type_error = 'Lookup values on primary keys have to be ' \
+                                 'a string or an integer.'
+                if lookup_type == 'range':
+                    if isinstance(value, (list, tuple)) and not (
+                            isinstance(value[0], (basestring, int, long)) and
+                            isinstance(value[1], (basestring, int, long))):
+                        raise DatabaseError(key_type_error)
+                elif not isinstance(value, (basestring, int, long)):
+                    raise DatabaseError(key_type_error)
+                # For the 'range' lookup type we have to convert both bounds.
+                if lookup_type == 'range':
+                    value = [create_key(db_table, value[0]),
+                             create_key(db_table, value[1])]
+                else:
+                    value = create_key(db_table, value)
+        if lookup_type not in OPERATORS_MAP:
+            raise DatabaseError("Lookup type %r isn't supported" % lookup_type)
+
+        # We check for negation after lookup_type isnull because it
+        # simplifies the code. All following lookup_type checks assume
+        # that they're not negated.
+        if lookup_type == 'isnull':
+            if (negated and value) or not value:
+                # TODO/XXX: is everything greater than None?
+                op = '>'
+            else:
+                op = '='
+            value = None
+        elif negated and lookup_type == 'exact':
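+            # A negated exact (__exact exclude) is emulated as
+            # (column < value) OR (column > value) via _combine_filters.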
+            if self.has_negated_exact_filter:
+                raise DatabaseError("You can't exclude more than one __exact "
+                                    "filter")
+            self.has_negated_exact_filter = True
+            self._combine_filters(column, db_type,
+                                  (('<', value), ('>', value)))
+            return
+        elif negated:
+            try:
+                op = NEGATION_MAP[lookup_type]
+            except KeyError:
+                raise DatabaseError("Lookup type %r can't be negated" % lookup_type)
+            if self.inequality_field and column != self.inequality_field:
+                raise DatabaseError("Can't have inequality filters on multiple "
+                    "columns (here: %r and %r)" % (self.inequality_field, column))
+            self.inequality_field = column
+        elif lookup_type == 'in':
+            # Create sub-query combinations, one for each value
+            if len(self.gae_query) * len(value) > 30:
+                raise DatabaseError("You can't query against more than "
+                                    "30 __in filter value combinations")
+            op_values = [('=', v) for v in value]
+            self._combine_filters(column, db_type, op_values)
+            return
+        elif lookup_type == 'startswith':
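+            # The datastore has no native prefix filter; emulate startswith
+            # as a range scan from value up to value + u'\ufffd'.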
+            self._add_filter(column, '>=', db_type, value)
+            if isinstance(value, str):
+                value = value.decode('utf8')
+            if isinstance(value, Key):
+                value = list(value.to_path())
+                if isinstance(value[-1], str):
+                    value[-1] = value[-1].decode('utf8')
+                value[-1] += u'\ufffd'
+                value = Key.from_path(*value)
+            else:
+                value += u'\ufffd'
+            self._add_filter(column, '<=', db_type, value)
+            return
+        elif lookup_type in ('range', 'year'):
+            self._add_filter(column, '>=', db_type, value[0])
+            op = '<=' if lookup_type == 'range' else '<'
+            self._add_filter(column, op, db_type, value[1])
+            return
+        else:
+            op = OPERATORS_MAP[lookup_type]
+
+        self._add_filter(column, op, db_type, value)
+
+    # ----------------------------------------------
+    # Internal API
+    # ----------------------------------------------
+    def _add_filter(self, column, op, db_type, value):
+        for query in self.gae_query:
+            key = '%s %s' % (column, op)
+            value = self.convert_value_for_db(db_type, value)
+            if isinstance(value, Text):
+                raise DatabaseError('TextField is not indexed by default, '
+                                    "so you can't filter on it. "
+                                    'Please add an index definition for the '
+                                    'column "%s" as described here:\n'
+                                    'http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine'
+                                    % column)
+            if key in query:
+                existing_value = query[key]
+                if isinstance(existing_value, list):
+                    existing_value.append(value)
+                else:
+                    query[key] = [existing_value, value]
+            else:
+                query[key] = value
+
+    def _combine_filters(self, column, db_type, op_values):
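+        # Cross-multiply the current sub-queries with the given
+        # (operator, value) pairs; the results run as a MultiQuery.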
+        gae_query = self.gae_query
+        combined = []
+        for query in gae_query:
+            for op, value in op_values:
+                self.gae_query = [Query(self.db_table,
+                                        keys_only=self.pks_only)]
+                self.gae_query[0].update(query)
+                self._add_filter(column, op, db_type, value)
+                combined.append(self.gae_query[0])
+        self.gae_query = combined
+
+    def _make_entity(self, entity):
+        if isinstance(entity, Key):
+            key = entity
+            entity = {}
+        else:
+            key = entity.key()
+
+        entity[self.query.get_meta().pk.column] = key
+        return entity
+
+    @safe_call
+    def _build_query(self):
+        if len(self.gae_query) > 1:
+            return MultiQuery(self.gae_query, self.gae_ordering)
+        query = self.gae_query[0]
+        query.Order(*self.gae_ordering)
+        return query
+
+    def get_matching_pk(self, low_mark=0, high_mark=None):
+        if not self.pk_filters:
+            return []
+
+        results = [result for result in Get(self.pk_filters)
+                   if result is not None and
+                       self.matches_filters(result)]
+        if self.ordering:
+            results.sort(cmp=self.order_pk_filtered)
+        if high_mark is not None and high_mark < len(results):
+            results = results[:high_mark]
+        if low_mark:
+            results = results[low_mark:]
+        return results
+
+    def order_pk_filtered(self, lhs, rhs):
+        left = dict(lhs)
+        left[self.query.get_meta().pk.column] = lhs.key().to_path()
+        right = dict(rhs)
+        right[self.query.get_meta().pk.column] = rhs.key().to_path()
+        return self._order_in_memory(left, right)
+
+    def matches_filters(self, entity):
+        item = dict(entity)
+        pk = self.query.get_meta().pk
+        value = self.convert_value_from_db(pk.db_type(connection=self.connection),
+            entity.key())
+        item[pk.column] = value
+        result = self._matches_filters(item, self.query.where)
+        return result
+
+class SQLCompiler(NonrelCompiler):
+    """
+    A simple App Engine query: no joins, no distinct, etc.
+    """
+    query_class = GAEQuery
+
+    def convert_value_from_db(self, db_type, value):
+        if isinstance(value, (list, tuple, set)) and \
+                db_type.startswith(('ListField:', 'SetField:')):
+            db_sub_type = db_type.split(':', 1)[1]
+            value = [self.convert_value_from_db(db_sub_type, subvalue)
+                     for subvalue in value]
+
+        if db_type.startswith('SetField:') and value is not None:
+            value = set(value)
+
+        if db_type.startswith('DictField:') and value is not None:
+            value = pickle.loads(value)
+            if ':' in db_type:
+                db_sub_type = db_type.split(':', 1)[1]
+                value = dict((key, self.convert_value_from_db(db_sub_type, value[key]))
+                             for key in value)
+
+        # The following GAE database types are all unicode subclasses; cast
+        # them to unicode so they appear as plain unicode instances to Django.
+        if isinstance(value, basestring) and value and db_type.startswith('decimal'):
+            value = decimal.Decimal(value)
+        elif isinstance(value, (Category, Email, Link, PhoneNumber, PostalAddress,
+                Text, unicode)):
+            value = unicode(value)
+        elif isinstance(value, Blob):
+            value = str(value)
+        elif isinstance(value, str):
+            # Always retrieve strings as unicode (old datasets may contain
+            # non-unicode strings; nevertheless, work with unicode ones).
+            value = value.decode('utf-8')
+        elif isinstance(value, Key):
+            # For now we don't support KeyFields, so a Key has to be the
+            # model's own primary key.
+            # TODO: GAE: support parents via GAEKeyField
+            assert value.parent() is None, "Parents are not yet supported!"
+            if db_type == 'integer':
+                if value.id() is None:
+                    raise DatabaseError('Wrong type for Key. Expected integer, '
+                        'found None')
+                else:
+                    value = value.id()
+            elif db_type == 'text':
+                if value.name() is None:
+                    raise DatabaseError('Wrong type for Key. Expected string, '
+                        'found None')
+                else:
+                    value = value.name()
+            else:
+                raise DatabaseError("%s fields cannot be keys on GAE" % db_type)
+        elif db_type == 'date' and isinstance(value, datetime.datetime):
+            value = value.date()
+        elif db_type == 'time' and isinstance(value, datetime.datetime):
+            value = value.time()
+        return value
+
+    def convert_value_for_db(self, db_type, value):
+        if isinstance(value, unicode):
+            value = unicode(value)
+        elif isinstance(value, str):
+            value = str(value)
+        elif isinstance(value, (list, tuple, set)) and \
+                db_type.startswith(('ListField:', 'SetField:')):
+            db_sub_type = db_type.split(':', 1)[1]
+            value = [self.convert_value_for_db(db_sub_type, subvalue)
+                     for subvalue in value]
+        elif isinstance(value, decimal.Decimal) and db_type.startswith("decimal:"):
+            value = self.connection.ops.value_to_db_decimal(value, *eval(db_type[8:]))
+        elif isinstance(value, dict) and db_type.startswith('DictField:'):
+            if ':' in db_type:
+                db_sub_type = db_type.split(':', 1)[1]
+                value = dict([(key, self.convert_value_for_db(db_sub_type, value[key]))
+                              for key in value])
+            value = Blob(pickle.dumps(value))
+
+        if db_type == 'gae_key':
+            return value
+        elif db_type == 'longtext':
+            # long text fields cannot be indexed on GAE so use GAE's database
+            # type Text
+            value = Text((isinstance(value, str) and value.decode('utf-8')) or value)
+        elif db_type == 'text':
+            value = (isinstance(value, str) and value.decode('utf-8')) or value
+        elif db_type == 'blob':
+            value = Blob(value)
+        elif type(value) is str:
+            # always store unicode strings
+            value = value.decode('utf-8')
+        elif db_type in ('date', 'time', 'datetime'):
+            # We have to check the db_type here because GAE always stores datetimes.
+            value = to_datetime(value)
+        return value
+
+class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
+    @safe_call
+    def insert(self, data, return_id=False):
+        gae_data = {}
+        opts = self.query.get_meta()
+        unindexed_fields = get_model_indexes(self.query.model)['unindexed']
+        unindexed_cols = [opts.get_field(name).column
+                          for name in unindexed_fields]
+        kwds = {'unindexed_properties': unindexed_cols}
+        for column, value in data.items():
+            if column == opts.pk.column:
+                if isinstance(value, basestring):
+                    kwds['name'] = value
+                else:
+                    kwds['id'] = value
+            elif isinstance(value, (tuple, list)) and not len(value):
+                # GAE does not store empty lists (and does not even allow
+                # passing empty lists to Entity.update), so skip them.
+                continue
+            else:
+                gae_data[column] = value
+
+        entity = Entity(self.query.get_meta().db_table, **kwds)
+        entity.update(gae_data)
+        key = Put(entity)
+        return key.id_or_name()
+
+class SQLUpdateCompiler(NonrelUpdateCompiler, SQLCompiler):
+    pass
+
+class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
+    pass
+
+def to_datetime(value):
+    """Convert a time or date to a datetime for datastore storage.
+
+    Args:
+    value: A datetime.time, datetime.date or string object.
+
+    Returns:
+    A datetime object with date set to 1970-01-01 if value is a datetime.time
+    A datetime object with date set to value.year - value.month - value.day and
+    time set to 0:00 if value is a datetime.date
+    """
+
+    if value is None:
+        return value
+    elif isinstance(value, datetime.datetime):
+        return value
+    elif isinstance(value, datetime.date):
+        return datetime.datetime(value.year, value.month, value.day)
+    elif isinstance(value, datetime.time):
+        return datetime.datetime(1970, 1, 1, value.hour, value.minute,
+            value.second, value.microsecond)
+
+def create_key(db_table, value):
+    if isinstance(value, (int, long)) and value < 1:
+        return None
+    return Key.from_path(db_table, value)
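
To illustrate the lookup translation above: _add_filter stores each condition
under a '<column> <op>' key on the underlying datastore Query, so common
Django lookups map roughly as follows (model and column names are
hypothetical):

    # Person.objects.filter(age__gte=21):
    query = Query('app_person')
    query['age >='] = 21

    # Person.objects.filter(name__startswith=u'Jo') becomes a range pair:
    query['name >='] = u'Jo'
    query['name <='] = u'Jo\ufffd'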

File djangoappengine/db/creation.py

+from .db_settings import get_model_indexes
+from djangotoolbox.db.creation import NonrelDatabaseCreation
+
+class StringType(object):
+    def __init__(self, internal_type):
+        self.internal_type = internal_type
+
+    def __mod__(self, field):
+        indexes = get_model_indexes(field['model'])
+        if field['name'] in indexes['indexed']:
+            return 'text'
+        elif field['name'] in indexes['unindexed']:
+            return 'longtext'
+        return self.internal_type
+
+def get_data_types():
+    # TODO: Add GAEKeyField and a corresponding db_type
+    string_types = ('text', 'longtext')
+    data_types = NonrelDatabaseCreation.data_types.copy()
+    for name, field_type in data_types.items():
+        if field_type in string_types:
+            data_types[name] = StringType(field_type)
+    return data_types
+
+class DatabaseCreation(NonrelDatabaseCreation):
+    # This dictionary maps Field internal types to their associated GAE column
+    # types, as strings. Column-type strings can contain format strings; they'll
+    # be interpolated against the values of Field.__dict__ before being output.
+    # If a column type is set to None, it won't be included in the output.
+
+    data_types = get_data_types()
+
+    def create_test_db(self, *args, **kw):
+        """Destroys the test datastore. A new store will be recreated on demand"""
+        self.destroy_test_db()
+        self.connection.use_test_datastore = True
+        self.connection.flush()
+
+    def destroy_test_db(self, *args, **kw):
+        """Destroys the test datastore files."""
+        from .base import destroy_datastore, get_test_datastore_paths
+        destroy_datastore(*get_test_datastore_paths())
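
A note on the StringType trick above: Django interpolates each data_types
entry with the % operator against a dict describing the field, so overloading
__mod__ lets the column type be chosen per field. A rough sketch (the model
and field names are hypothetical):

    st = StringType('text')
    column_type = st % {'model': Profile, 'name': 'bio'}
    # -> 'longtext' if 'bio' is listed as unindexed for Profile,
    #    'text' if it is explicitly indexed, else the default 'text'.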

File djangoappengine/db/db_settings.py