Commits

Anonymous committed 7da1657 Merge

Files changed (26)

+[patterns]
+**.txt = native
+**.pyva = native
+**.py = native
+**.c = native
+**.cpp = native
+**.cu = native
+**.h = native
+**.hpp = native
+**.tmpl = native
+**.html = native
+**.htm = native
+**.js = native
+**.manifest = native
+**.yaml = native
 syntax: glob
+build
+dist
+*.egg-info
 .project
 .pydevproject
 .settings
 *.tmp
 desktop.ini
 nbproject
+build
+dist

LICENSE

+Copyright (c) Waldemar Kornewald, Thomas Wanschik, and all contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+    1. Redistributions of source code must retain the above copyright notice, 
+       this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright 
+       notice, this list of conditions and the following disclaimer in the
+       documentation and/or other materials provided with the distribution.
+
+    3. Neither the name of All Buttons Pressed nor
+       the names of its contributors may be used to endorse or promote products
+       derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

MANIFEST.in

+include LICENSE
+include CHANGELOG.rst
+include README.rst

appstats/__init__.py

Empty file added.
+# Initialize Django
+from djangoappengine.main import main
+
+from google.appengine.ext.appstats.ui import main
+
+if __name__ == '__main__':
+    main()

boot.py

-import os, sys
+import logging
+import os
+import sys
 
-# We allow a two-level project structure where your root folder contains
-# project-specific apps and the "common" subfolder contains common apps.
-COMMON_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
-PROJECT_DIR = os.path.dirname(COMMON_DIR)
-if os.path.basename(COMMON_DIR) == 'common-apps':
-    MAIN_DIRS = (PROJECT_DIR, COMMON_DIR)
-    print >>sys.stderr, '!!!!!!!!!!!!!!!!!!!!!!!!!!\n' \
-                        'Deprecation warning: the "common-apps" folder ' \
-                        'is deprecated. Please move all modules from ' \
-                        'there into the main project folder and remove ' \
-                        'the "common-apps" folder.\n' \
-                        '!!!!!!!!!!!!!!!!!!!!!!!!!!\n'
-else:
-    PROJECT_DIR = COMMON_DIR
-    MAIN_DIRS = (PROJECT_DIR,)
+PROJECT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+DATA_ROOT = os.path.join(PROJECT_DIR, '.gaedata')
 
 # Overrides for os.environ
-env_ext = {}
-if 'DJANGO_SETTINGS_MODULE' not in os.environ:
-    env_ext['DJANGO_SETTINGS_MODULE'] = 'settings'
+env_ext = {'DJANGO_SETTINGS_MODULE': 'settings'}
 
 def setup_env():
     """Configures app engine environment for command-line apps."""
         # may be. First look within the project for a local copy, then look for
         # where the Mac OS SDK installs it.
         paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
-                 os.path.join(COMMON_DIR, '.google_appengine'),
+                 os.environ.get('APP_ENGINE_SDK'),
                  '/usr/local/google_appengine',
                  '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
         for path in os.environ.get('PATH', '').split(os.pathsep):
             path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
             paths.append(path)
         # Loop through all possible paths and look for the SDK dir.
-        SDK_PATH = None
-        for sdk_path in paths:
-            sdk_path = os.path.realpath(sdk_path)
-            if os.path.exists(sdk_path):
-                SDK_PATH = sdk_path
+        sdk_path = None
+        for path in paths:
+            if not path:
+                continue
+            path = os.path.expanduser(path)
+            path = os.path.realpath(path)
+            if os.path.exists(path):
+                sdk_path = path
                 break
-        if SDK_PATH is None:
+        if sdk_path is None:
             # The SDK could not be found in any known location.
             sys.stderr.write('The Google App Engine SDK could not be found!\n'
                              "Make sure it's accessible via your PATH "
-                             "environment and called google_appengine.")
+                             "environment and called google_appengine.\n")
             sys.exit(1)
         # Add the SDK and the libraries within it to the system path.
-        EXTRA_PATHS = [SDK_PATH]
-        lib = os.path.join(SDK_PATH, 'lib')
+        extra_paths = [sdk_path]
+        lib = os.path.join(sdk_path, 'lib')
         # Automatically add all packages in the SDK's lib folder:
         for dir in os.listdir(lib):
             path = os.path.join(lib, dir)
             detect = (os.path.join(path, dir), os.path.join(path, 'lib', dir))
             for path in detect:
                 if os.path.isdir(path) and not dir == 'django':
-                    EXTRA_PATHS.append(os.path.dirname(path))
+                    extra_paths.append(os.path.dirname(path))
                     break
-        sys.path = EXTRA_PATHS + sys.path
+        sys.path = extra_paths + sys.path
         from google.appengine.api import apiproxy_stub_map
 
     setup_project()
+    from .utils import have_appserver
+    if have_appserver:
+        # App Engine's threading.local is broken
+        setup_threading()
+    elif not os.path.exists(DATA_ROOT):
+        os.mkdir(DATA_ROOT)
     setup_logging()
 
-    # Patch Django to support loading management commands from zip files
-    from django.core import management
-    management.find_commands = find_commands
+    if not have_appserver:
+        # Patch Django to support loading management commands from zip files
+        from django.core import management
+        management.find_commands = find_commands
 
 def find_commands(management_dir):
     """
         pass
 
 def setup_logging():
-    import logging
-
     # Fix Python 2.6 logging module
     logging.logMultiprocessing = 0
 
     # Enable logging
-    from django.conf import settings
-    if settings.DEBUG:
-        logging.getLogger().setLevel(logging.DEBUG)
-    else:
-        logging.getLogger().setLevel(logging.INFO)
+    level = logging.DEBUG
+    from .utils import have_appserver
+    if have_appserver:
+        # We can't import settings at this point when running a normal
+        # manage.py command because this module gets imported from settings.py
+        from django.conf import settings
+        if not settings.DEBUG:
+            level = logging.INFO
+    logging.getLogger().setLevel(level)
 
 def setup_project():
     from .utils import have_appserver, on_production_server
     if have_appserver:
         # This fixes a pwd import bug for os.path.expanduser()
-        global env_ext
         env_ext['HOME'] = PROJECT_DIR
 
-    # Get the subprocess module into the dev_appserver sandbox.
+    # The dev_appserver creates a sandbox which restricts access to certain
+    # modules and builtins in order to emulate the production environment.
+    # Here we get the subprocess module back into the dev_appserver sandbox.
     # This module is just too important for development.
-    # The second part of this hack is in runserver.py which adds
-    # important environment variables like PATH etc.
-    if not on_production_server:
+    # We also add the compiler/parser module back and enable https connections
+    # (they seem to be broken on Windows because the _ssl module is disallowed).
+    if not have_appserver:
+        from google.appengine.tools import dev_appserver
         try:
+            # Backup os.environ. It gets overwritten by the dev_appserver,
+            # but it's needed by the subprocess module.
+            env = dev_appserver.DEFAULT_ENV
+            dev_appserver.DEFAULT_ENV = os.environ.copy()
+            dev_appserver.DEFAULT_ENV.update(env)
+            # Backup the buffer() builtin. The subprocess module in Python 2.5
+            # on Linux and OS X needs it, but the dev_appserver removes it.
+            dev_appserver.buffer = buffer
+        except AttributeError:
+            logging.warn('Could not patch the default environment. '
+                         'The subprocess module will not work correctly.')
+
+        try:
+            # Allow importing compiler/parser and _ssl modules (for https)
+            dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
+                ('parser', '_ssl'))
+        except AttributeError:
+            logging.warn('Could not patch modules whitelist. '
+                         'The compiler and parser modules will not work and '
+                         'SSL support is disabled.')
+    elif not on_production_server:
+        try:
+            # Restore the real subprocess module
             from google.appengine.api.mail_stub import subprocess
             sys.modules['subprocess'] = subprocess
-        except ImportError:
-            import logging
-            logging.warn('Could not add the subprocess module to the sandbox.')
+            # Re-inject the buffer() builtin into the subprocess module
+            from google.appengine.tools import dev_appserver
+            subprocess.buffer = dev_appserver.buffer
+        except Exception, e:
+            logging.warn('Could not add the subprocess module to the sandbox: %s' % e)
 
     os.environ.update(env_ext)
 
-    EXTRA_PATHS = list(MAIN_DIRS)
-    EXTRA_PATHS.append(os.path.dirname(PROJECT_DIR))
-    EXTRA_PATHS.append(os.path.join(os.path.dirname(__file__), 'lib'))
-
-    ZIP_PACKAGES_DIRS = tuple(os.path.join(dir, 'zip-packages')
-                              for dir in MAIN_DIRS)
+    extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
+    zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')
 
     # We support zipped packages in the project's zip-packages folder.
-    for packages_dir in ZIP_PACKAGES_DIRS:
-        if os.path.isdir(packages_dir):
-            for zip_package in os.listdir(packages_dir):
-                EXTRA_PATHS.append(os.path.join(packages_dir, zip_package))
+    if os.path.isdir(zip_packages_dir):
+        for zip_package in os.listdir(zip_packages_dir):
+            extra_paths.append(os.path.join(zip_packages_dir, zip_package))
 
     # App Engine causes main.py to be reloaded if an exception gets raised
     # on the first request of a main.py instance, so don't call setup_project()
     # multiple times. We ensure this indirectly by checking if we've already
-    # modified sys.path.
-    if len(sys.path) < len(EXTRA_PATHS) or \
-            sys.path[:len(EXTRA_PATHS)] != EXTRA_PATHS:
-
-        sys.path = EXTRA_PATHS + sys.path
+    # modified sys.path.
+    if len(sys.path) < len(extra_paths) or \
+            sys.path[:len(extra_paths)] != extra_paths:
+        for path in extra_paths:
+            while path in sys.path:
+                sys.path.remove(path)
+        sys.path = extra_paths + sys.path
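
With COMMON_DIR gone, setup_env() is the single entry point that puts the SDK and the project on sys.path. For orientation (a sketch, not part of this changeset), a project-level manage.py for this Django 1.2-era layout would typically bootstrap through it along these lines:

    #!/usr/bin/env python
    # Sketch of a project manage.py; it only relies on the setup_env() shown above.
    from djangoappengine.boot import setup_env
    setup_env()

    from django.core.management import execute_manager
    import settings  # the project's settings.py

    if __name__ == '__main__':
        execute_manager(settings)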

db/base.py

-import datetime
+from ..utils import appid, have_appserver, on_production_server
+from ..boot import DATA_ROOT
 from .creation import DatabaseCreation
-from ..utils import appid, have_appserver, on_production_server
+from django.db.backends.util import format_number
 from djangotoolbox.db.base import NonrelDatabaseFeatures, \
     NonrelDatabaseOperations, NonrelDatabaseWrapper, NonrelDatabaseClient, \
     NonrelDatabaseValidation, NonrelDatabaseIntrospection
-import logging, os
-from django.db.backends.util import format_number
+from urllib2 import HTTPError, URLError
+import logging
+import os
+import time
+
+REMOTE_API_SCRIPT = '$PYTHON_LIB/google/appengine/ext/remote_api/handler.py'
 
 def auth_func():
     import getpass
-    return raw_input('Login via Google Account:'), getpass.getpass('Password:')
+    return raw_input('Login via Google Account (see note above if login fails): '), getpass.getpass('Password: ')
 
 def rpc_server_factory(*args, ** kwargs):
     from google.appengine.tools import appengine_rpc
     """
     from google.appengine.tools import dev_appserver_main
     datastore_path = options.get('datastore_path',
-                                 dev_appserver_main.DEFAULT_ARGS['datastore_path'].replace(
-                                 'dev_appserver', 'django_%s' % appid))
+                                 os.path.join(DATA_ROOT, 'datastore'))
     blobstore_path = options.get('blobstore_path',
-                                 dev_appserver_main.DEFAULT_ARGS['blobstore_path'].replace(
-                                 'dev_appserver', 'django_%s' % appid))
+                                 os.path.join(DATA_ROOT, 'blobstore'))
     history_path = options.get('history_path',
-                               dev_appserver_main.DEFAULT_ARGS['history_path'].replace(
-                               'dev_appserver', 'django_%s' % appid))
+                               os.path.join(DATA_ROOT, 'history'))
     return datastore_path, blobstore_path, history_path
 
 def get_test_datastore_paths(inmemory=True):
                 logging.error("Failed to clear datastore: %s" % error)
 
 class DatabaseFeatures(NonrelDatabaseFeatures):
-    pass
+    allows_primary_key_0 = True
+    supports_dicts = True
 
 class DatabaseOperations(NonrelDatabaseOperations):
     compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'
         self.validation = DatabaseValidation(self)
         self.introspection = DatabaseIntrospection(self)
         options = self.settings_dict
-        self.use_test_datastore = options.get('use_test_datastore', False)
-        self.test_datastore_inmemory = options.get('test_datastore_inmemory', True)
-        self.remote = options.get('remote', False)
+        self.use_test_datastore = False
+        self.test_datastore_inmemory = True
+        self.remote = options.get('REMOTE', False)
         if on_production_server:
             self.remote = False
-        self.remote_app_id = options.get('remote_id', appid)
-        self.remote_host = options.get('remote_host', '%s.appspot.com' % self.remote_app_id)
-        self.remote_url = options.get('remote_url', '/remote_api')
+        self.remote_app_id = options.get('REMOTE_APP_ID', appid)
+        self.high_replication = options.get('HIGH_REPLICATION', False)
+        self.domain = options.get('DOMAIN', 'appspot.com')
+        self.remote_api_path = options.get('REMOTE_API_PATH', None)
+        self.secure_remote_api = options.get('SECURE_REMOTE_API', True)
         self._setup_stubs()
 
     def _get_paths(self):
             self.setup_remote()
 
     def setup_remote(self):
+        if not self.remote_api_path:
+            from ..utils import appconfig
+            for handler in appconfig.handlers:
+                if handler.script == REMOTE_API_SCRIPT:
+                    self.remote_api_path = handler.url.split('(', 1)[0]
+                    break
         self.remote = True
-        logging.info('Setting up remote_api for "%s" at http://%s%s' %
-                     (self.remote_app_id, self.remote_host, self.remote_url)
-                     )
+        server = '%s.%s' % (self.remote_app_id, self.domain)
+        remote_url = 'https://%s%s' % (server, self.remote_api_path)
+        logging.info('Setting up remote_api for "%s" at %s' %
+                     (self.remote_app_id, remote_url))
+        if not have_appserver:
+            print('Connecting to remote_api handler.\n\n'
+                  'IMPORTANT: Check your login method settings in the '
+                  'App Engine Dashboard if you have problems logging in. '
+                  'Login is only supported for Google Accounts.\n')
         from google.appengine.ext.remote_api import remote_api_stub
-        from google.appengine.ext import db
-        remote_api_stub.ConfigureRemoteDatastore(self.remote_app_id,
-            self.remote_url, auth_func, self.remote_host,
+        remote_app_id = self.remote_app_id
+        if self.high_replication:
+            remote_app_id = 's~' + remote_app_id
+        remote_api_stub.ConfigureRemoteApi(remote_app_id,
+            self.remote_api_path, auth_func, servername=server,
+            secure=self.secure_remote_api,
             rpc_server_factory=rpc_server_factory)
-        logging.info('Now using the remote datastore for "%s" at http://%s%s' %
-                     (self.remote_app_id, self.remote_host, self.remote_url))
+        retry_delay = 1
+        while retry_delay <= 16:
+            try:
+                remote_api_stub.MaybeInvokeAuthentication()
+            except HTTPError, e:
+                if not have_appserver:
+                    print 'Retrying in %d seconds...' % retry_delay
+                time.sleep(retry_delay)
+                retry_delay *= 2
+            else:
+                break
+        else:
+            try:
+                remote_api_stub.MaybeInvokeAuthentication()
+            except HTTPError, e:
+                raise URLError("%s\n"
+                               "Couldn't reach remote_api handler at %s.\n"
+                               "Make sure you've deployed your project and "
+                               "installed a remote_api handler in app.yaml. "
+                               "Note that login is only supported for "
+                               "Google Accounts. Make sure you've configured "
+                               "the correct authentication method in the "
+                               "App Engine Dashboard."
+                               % (e, remote_url))
+        logging.info('Now using the remote datastore for "%s" at %s' %
+                     (self.remote_app_id, remote_url))
 
     def flush(self):
         """Helper function to remove the current datastore and re-open the stubs"""

db/compiler.py

-from .db_settings import get_indexes
+from .db_settings import get_model_indexes
 
 import datetime
 import sys
 from djangotoolbox.db.basecompiler import NonrelQuery, NonrelCompiler, \
     NonrelInsertCompiler, NonrelUpdateCompiler, NonrelDeleteCompiler
 
+import cPickle as pickle
+
 import decimal
 
 # Valid query types (a dictionary is used for speedy lookups).
             pks_only = True
         self.db_table = self.query.get_meta().db_table
         self.pks_only = pks_only
-        self.gae_query = [Query(self.db_table, keys_only=self.pks_only)]
+        start_cursor = getattr(self.query, '_gae_start_cursor', None)
+        end_cursor = getattr(self.query, '_gae_end_cursor', None)
+        self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
+                                cursor=start_cursor, end_cursor=end_cursor)]
 
     # This is needed for debugging
     def __repr__(self):
     @safe_call
     def fetch(self, low_mark, high_mark):
         query = self._build_query()
+        executed = False
         if self.excluded_pks and high_mark is not None:
             high_mark += len(self.excluded_pks)
         if self.pk_filters is not None:
                 if low_mark:
                     kw['offset'] = low_mark
                 results = query.Run(**kw)
+                executed = True
             elif high_mark > low_mark:
                 results = query.Get(high_mark - low_mark, low_mark)
+                executed = True
             else:
                 results = ()
 
                 continue
             yield self._make_entity(entity)
 
+        if executed and not isinstance(query, MultiQuery):
+            self.query._gae_cursor = query.GetCompiledCursor()
+
     @safe_call
     def count(self, limit=None):
         if self.pk_filters is not None:
             return len(self.get_matching_pk(0, limit))
         if self.excluded_pks:
-            return len(list(self.fetch(0, 300)))
-        return self._build_query().Count(limit)
+            return len(list(self.fetch(0, 2000)))
+        kw = {}
+        if limit is not None:
+            kw['limit'] = limit
+        return self._build_query().Count(**kw)
 
     @safe_call
     def delete(self):
         for query in self.gae_query:
             key = '%s %s' % (column, op)
             value = self.convert_value_for_db(db_type, value)
+            if isinstance(value, Text):
+                raise DatabaseError('TextField is not indexed by default, '
+                                    "so you can't filter on it. Please add "
+                                    'an index definition for the column %s '
+                                    'on the model %s.%s as described here:\n'
+                                    'http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine'
+                                    % (column, self.query.model.__module__, self.query.model.__name__))
             if key in query:
                 existing_value = query[key]
                 if isinstance(existing_value, list):
 
     @safe_call
     def _build_query(self):
+        for query in self.gae_query:
+            query.Order(*self.gae_ordering)
         if len(self.gae_query) > 1:
             return MultiQuery(self.gae_query, self.gae_ordering)
-        query = self.gae_query[0]
-        query.Order(*self.gae_ordering)
-        return query
+        return self.gae_query[0]
 
     def get_matching_pk(self, low_mark=0, high_mark=None):
         if not self.pk_filters:
     query_class = GAEQuery
 
     def convert_value_from_db(self, db_type, value):
-        if isinstance(value, (list, tuple)) and len(value) and \
-                db_type.startswith('ListField:'):
+        if isinstance(value, (list, tuple, set)) and \
+                db_type.startswith(('ListField:', 'SetField:')):
             db_sub_type = db_type.split(':', 1)[1]
             value = [self.convert_value_from_db(db_sub_type, subvalue)
                      for subvalue in value]
 
+        if db_type.startswith('SetField:') and value is not None:
+            value = set(value)
+
+        if db_type.startswith('DictField:') and value is not None:
+            value = pickle.loads(value)
+            if ':' in db_type:
+                db_sub_type = db_type.split(':', 1)[1]
+                value = dict((key, self.convert_value_from_db(db_sub_type, value[key]))
+                             for key in value)
+
         # the following GAE database types are all unicode subclasses, cast them
         # to unicode so they appear like pure unicode instances for django
         if isinstance(value, basestring) and value and db_type.startswith('decimal'):
             value = unicode(value)
         elif isinstance(value, str):
             value = str(value)
-        elif isinstance(value, (list, tuple)) and len(value) and \
-                db_type.startswith('ListField:'):
+        elif isinstance(value, (list, tuple, set)) and \
+                db_type.startswith(('ListField:', 'SetField:')):
             db_sub_type = db_type.split(':', 1)[1]
             value = [self.convert_value_for_db(db_sub_type, subvalue)
                      for subvalue in value]
         elif isinstance(value, decimal.Decimal) and db_type.startswith("decimal:"):
             value = self.connection.ops.value_to_db_decimal(value, *eval(db_type[8:]))
+        elif isinstance(value, dict) and db_type.startswith('DictField:'):
+            if ':' in db_type:
+                db_sub_type = db_type.split(':', 1)[1]
+                value = dict([(key, self.convert_value_for_db(db_sub_type, value[key]))
+                              for key in value])
+            value = Blob(pickle.dumps(value))
 
         if db_type == 'gae_key':
             return value
     def insert(self, data, return_id=False):
         gae_data = {}
         opts = self.query.get_meta()
-        indexes = get_indexes().get(self.query.model, {})
-        unindexed_fields = indexes.get('unindexed', ())
+        unindexed_fields = get_model_indexes(self.query.model)['unindexed']
         unindexed_cols = [opts.get_field(name).column
                           for name in unindexed_fields]
         kwds = {'unindexed_properties': unindexed_cols}

db/creation.py

-from .db_settings import get_indexes
+from .db_settings import get_model_indexes
 from djangotoolbox.db.creation import NonrelDatabaseCreation
 
 class StringType(object):
         self.internal_type = internal_type
 
     def __mod__(self, field):
-        indexes = get_indexes().get(field['model'], {})
-        if field['name'] in indexes.get('indexed', ()):
+        indexes = get_model_indexes(field['model'])
+        if field['name'] in indexes['indexed']:
             return 'text'
-        elif field['name'] in indexes.get('unindexed', ()):
+        elif field['name'] in indexes['unindexed']:
             return 'longtext'
         return self.internal_type
 

db/db_settings.py

 from django.conf import settings
 from django.utils.importlib import import_module
 
+# TODO: add autodiscover() and make API more like dbindexer's register_index
+
 _MODULE_NAMES = getattr(settings, 'GAE_SETTINGS_MODULES', ())
 
 FIELD_INDEXES = None
 
 # TODO: add support for eventual consistency setting on specific models
 
+def get_model_indexes(model):
+    indexes = get_indexes()
+    model_index = {'indexed': [], 'unindexed': []}
+    for item in reversed(model.mro()):
+        config = indexes.get(item, {})
+        model_index['indexed'].extend(config.get('indexed', ()))
+        model_index['unindexed'].extend(config.get('unindexed', ()))
+    return model_index
+
 def get_indexes():
     global FIELD_INDEXES
     if FIELD_INDEXES is None:
         field_indexes = {}
         for name in _MODULE_NAMES:
-            try:
-                field_indexes.update(import_module(name).FIELD_INDEXES)
-            except (ImportError, AttributeError):
-                pass
+            field_indexes.update(import_module(name).FIELD_INDEXES)
         FIELD_INDEXES = field_indexes
     return FIELD_INDEXES
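
get_indexes() now fails loudly if a module listed in GAE_SETTINGS_MODULES is missing or lacks FIELD_INDEXES, and get_model_indexes() merges definitions across a model's bases. As a hedged illustration, a per-field index module (the module and model names here are made up) looks roughly like this:

    # gae_db_settings.py -- referenced from settings.py via
    # GAE_SETTINGS_MODULES = ('gae_db_settings',)
    from myapp.models import Post  # hypothetical model

    FIELD_INDEXES = {
        Post: {
            # Fields listed as 'indexed' are stored so they can be filtered on
            # (e.g. a TextField, which otherwise triggers the DatabaseError
            # added in db/compiler.py above).
            'indexed': ('title',),
            # Fields listed as 'unindexed' are stored without datastore indexes.
            'unindexed': ('body',),
        },
    }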

db/utils.py

+from google.appengine.datastore.datastore_query import Cursor
+
+class CursorQueryMixin(object):
+    def clone(self, *args, **kwargs):
+        kwargs['_gae_cursor'] = getattr(self, '_gae_cursor', None)
+        kwargs['_gae_start_cursor'] = getattr(self, '_gae_start_cursor', None)
+        kwargs['_gae_end_cursor'] = getattr(self, '_gae_end_cursor', None)
+        return super(CursorQueryMixin, self).clone(*args, **kwargs)
+
+def get_cursor(queryset):
+    # Evaluate QuerySet
+    len(queryset)
+    cursor = getattr(queryset.query, '_gae_cursor', None)
+    return Cursor.to_websafe_string(cursor)
+
+def set_cursor(queryset, start=None, end=None):
+    queryset = queryset.all()
+    class CursorQuery(CursorQueryMixin, queryset.query.__class__):
+        pass
+    queryset.query = queryset.query.clone(klass=CursorQuery)
+    if start is not None:
+        start = Cursor.from_websafe_string(start)
+    queryset.query._gae_start_cursor = start
+    if end is not None:
+        end = Cursor.from_websafe_string(end)
+    queryset.query._gae_end_cursor = end
+    return queryset
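
get_cursor()/set_cursor() enable datastore-cursor pagination from plain querysets, mirroring the new test_cursor test further down. A rough usage sketch (the model is made up):

    from djangoappengine.db.utils import get_cursor, set_cursor
    from myapp.models import Entry  # hypothetical model

    def fetch_page(page_size=20, cursor=None):
        # Slice first, then attach the start cursor, as in test_cursor.
        query = Entry.objects.all()[:page_size]
        if cursor is not None:
            query = set_cursor(query, start=cursor)
        items = list(query)              # runs the datastore query
        next_cursor = get_cursor(query)  # websafe string for the next page
        return items, next_cursor
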
+from django.conf import settings
+
+if 'django.contrib.auth' in settings.INSTALLED_APPS:
+    from dbindexer.api import register_index
+    from django.contrib.auth.models import User
+
+    register_index(User, {
+        'username': 'iexact',
+        'email': 'iexact',
+    })
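
With these indexes registered (and dbindexer active in the project), case-insensitive lookups on the contrib User model work against the datastore, e.g.:

    from django.contrib.auth.models import User

    # Matches 'admin', 'Admin', 'ADMIN', ... via the iexact index on username.
    user = User.objects.get(username__iexact='Admin')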

deferred/handler.py

-import os, sys
+# Initialize Django
+from djangoappengine.main.main import make_profileable
 
-parent_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
-if parent_dir not in sys.path:
-    sys.path.insert(0, parent_dir)
+from google.appengine.ext.deferred.handler import main
 
-# Initialize Django
-from djangoappengine.main import main as gaemain
+main = make_profileable(main)
 
-# Import and run the actual handler
-from google.appengine.ext.deferred.handler import main
 if __name__ == '__main__':
     main()
         if message.bcc:
             gmsg.bcc = list(message.bcc)
         if message.attachments:
-            gmsg.attachments = [(a[0], a[1]) for a in message.attachments]
-        if isinstance(message, EmailMultiAlternatives):  # look for HTML
+            # Must be populated with (filename, filecontents) tuples
+            attachments = []
+            for attachment in message.attachments:
+                if isinstance(attachment, MIMEBase):
+                    attachments.append((attachment.get_filename(),
+                                        attachment.get_payload(decode=True)))
+                else:
+                    attachments.append((attachment[0], attachment[1]))
+            gmsg.attachments = attachments
+        # Look for HTML alternative content
+        if isinstance(message, EmailMultiAlternatives):
             for content, mimetype in message.alternatives:
                 if mimetype == 'text/html':
                     gmsg.html = content

main/main.py

-import os, sys
+import os
+import sys
 
 # Add parent folder to sys.path, so we can import boot.
 # App Engine causes main.py to be reloaded if an exception gets raised
-# on the first request of a main.py instance, so don't add parent_dir multiple
+# on the first request of a main.py instance, so don't add project_dir multiple
 # times.
-parent_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
-if parent_dir not in sys.path:
-    sys.path.insert(0, parent_dir)
+project_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
+if project_dir not in sys.path or sys.path.index(project_dir) > 0:
+    while project_dir in sys.path:
+        sys.path.remove(project_dir)
+    sys.path.insert(0, project_dir)
 
-# Remove the standard version of Django
+for path in sys.path[:]:
+    if path != project_dir and os.path.isdir(os.path.join(path, 'django')):
+        sys.path.remove(path)
+        break
+
+# Remove the standard version of Django.
 if 'django' in sys.modules and sys.modules['django'].VERSION < (1, 2):
     for k in [k for k in sys.modules
-              if k.startswith('django\.') or k == 'django']:
+              if k.startswith('django.') or k == 'django']:
         del sys.modules[k]
 
-from djangoappengine import boot
-boot.setup_project()
-boot.setup_threading()
-boot.setup_logging()
+from djangoappengine.boot import setup_env, setup_logging, env_ext
+setup_env()
 
-import django.core.handlers.wsgi
-from google.appengine.ext.webapp import util
+from django.core.handlers.wsgi import WSGIHandler
+from google.appengine.ext.webapp.util import run_wsgi_app
 from django.conf import settings
 
 def log_traceback(*args, **kwargs):
         sys.path = path_backup[:]
     except:
         path_backup = sys.path[:]
-    os.environ.update(boot.env_ext)
-    boot.setup_logging()
+    os.environ.update(env_ext)
+    setup_logging()
 
-    # Create a Django application for WSGI.
-    application = django.core.handlers.wsgi.WSGIHandler()
+    # Create a Django application for WSGI
+    application = WSGIHandler()
+
+    # Add the staticfiles handler if necessary
+    if settings.DEBUG and 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
+        from django.contrib.staticfiles.handlers import StaticFilesHandler
+        application = StaticFilesHandler(application)
 
     # Run the WSGI CGI handler with that application.
-    util.run_wsgi_app(application)
+    run_wsgi_app(application)
 
-def profile_main():
+def profile_main(func):
     import logging, cProfile, pstats, random, StringIO
     only_forced_profile = getattr(settings, 'ONLY_FORCED_PROFILE', False)
     profile_percentage = getattr(settings, 'PROFILE_PERCENTAGE', None)
                 'profile=forced' not in os.environ.get('QUERY_STRING')) or \
             (not only_forced_profile and profile_percentage and
                 float(profile_percentage) / 100.0 <= random.random()):
-        return real_main()
+        return func()
 
     prof = cProfile.Profile()
-    prof = prof.runctx('real_main()', globals(), locals())
+    prof = prof.runctx('func()', globals(), locals())
     stream = StringIO.StringIO()
     stats = pstats.Stats(prof, stream=stream)
     sort_by = getattr(settings, 'SORT_PROFILE_RESULTS_BY', 'time')
         stats.print_callers()
     logging.info('Profile data:\n%s', stream.getvalue())
 
-main = getattr(settings, 'ENABLE_PROFILER', False) and profile_main or real_main
+def make_profileable(func):
+    if getattr(settings, 'ENABLE_PROFILER', False):
+        return lambda: profile_main(func)
+    return func
+
+main = make_profileable(real_main)
 
 if __name__ == '__main__':
     main()
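
The profiler wrapper is controlled purely through Django settings read in profile_main(); a sketch of the relevant settings.py entries (values are illustrative):

    # settings.py (illustrative values)
    ENABLE_PROFILER = True            # wrap handlers in cProfile via make_profileable()
    ONLY_FORCED_PROFILE = False       # if True, profile only requests with ?profile=forced
    PROFILE_PERCENTAGE = 10           # otherwise profile roughly 10% of requests
    SORT_PROFILE_RESULTS_BY = 'time'  # any pstats sort key, e.g. 'cumulative'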

management/commands/deploy.py

 # CHANGED: show warning if profiler is enabled, so you don't mistakenly upload
 # with non-production settings. Also, added --nosyncdb switch.
 
-import sys
-import logging
-
+from ...boot import PROJECT_DIR
 from django.conf import settings
 from django.core.management import call_command
 from django.core.management.base import BaseCommand
+import logging
+import sys
+import time
+
 
 def run_appcfg(argv):
-    # import this so that we run through the checks at the beginning
-    # and report the appropriate errors
-    import appcfg
-
     # We don't really want to use that one though, it just executes this one
     from google.appengine.tools import appcfg
 
     # Reset the logging level to WARN as appcfg will spew tons of logs on INFO
     logging.getLogger().setLevel(logging.WARN)
-    
+
     new_args = argv[:]
     new_args[1] = 'update'
-    new_args.append('.')
+    new_args.append(PROJECT_DIR)
     syncdb = True
     if '--nosyncdb' in new_args:
         syncdb = False
     appcfg.main(new_args)
 
     if syncdb:
-        from django.core.management import call_command
-        from django.db import connection
-        connection.setup_remote()
         print 'Running syncdb.'
+        # Wait a little bit for deployment to finish
+        for countdown in range(9, 0, -1):
+            sys.stdout.write('%s\r' % countdown)
+            time.sleep(1)
+        from django.db import connections
+        for connection in connections.all():
+            if hasattr(connection, 'setup_remote'):
+                connection.setup_remote()
         call_command('syncdb', remote=True, interactive=True)
 
-    from django.conf import settings
     if getattr(settings, 'ENABLE_PROFILER', False):
         print '--------------------------\n' \
               'WARNING: PROFILER ENABLED!\n' \

management/commands/runserver.py

 
 
 import logging
-import os
 import sys
 
+from django.db import connections
+from ...boot import PROJECT_DIR
+from ...db.base import DatabaseWrapper
 from django.core.management.base import BaseCommand
-from django.db import connection
+from django.core.exceptions import ImproperlyConfigured
 
 
 def start_dev_appserver(argv):
             try:
                 addr, port = addrport.split(":")
             except ValueError:
-                addr, port = None, addrport
-            if not port.isdigit():
-                print "Error: '%s' is not a valid port number." % port
-                sys.exit(1)
+                addr = addrport
         else:
             args.append(argv[2])
         args.extend(argv[3:])
                      '--smtp_port', str(settings.EMAIL_PORT),
                      '--smtp_user', settings.EMAIL_HOST_USER,
                      '--smtp_password', settings.EMAIL_HOST_PASSWORD])
+
     # Pass the application specific datastore location to the server.
-    p = connection._get_paths()
-    if '--datastore_path' not in args:
-        args.extend(['--datastore_path', p[0]])
-    if '--blobstore_path' not in args:
-        args.extend(['--blobstore_path', p[1]])
-    if '--history_path' not in args:
-        args.extend(['--history_path', p[2]])
+    for name in connections:
+        connection = connections[name]
+        if isinstance(connection, DatabaseWrapper):
+            p = connection._get_paths()
+            if '--datastore_path' not in args:
+                args.extend(['--datastore_path', p[0]])
+            if '--blobstore_path' not in args:
+                args.extend(['--blobstore_path', p[1]])
+            if '--history_path' not in args:
+                args.extend(['--history_path', p[2]])
+            break
 
     # Reset logging level to INFO as dev_appserver will spew tons of debug logs
     logging.getLogger().setLevel(logging.INFO)
 
-    # Allow to run subprocesses
-    from google.appengine.tools import dev_appserver
-    try:
-        env = dev_appserver.DEFAULT_ENV
-        dev_appserver.DEFAULT_ENV = os.environ.copy()
-        dev_appserver.DEFAULT_ENV.update(env)
-    except AttributeError:
-        logging.warn('Could not patch the default environment. '
-                     'The subprocess module will not work correctly.')
-
     # Append the current working directory to the arguments.
-    dev_appserver_main.main([progname] + args + [os.getcwdu()])
-
+    dev_appserver_main.main([progname] + args + [PROJECT_DIR])
 
 class Command(BaseCommand):
     """Overrides the default Django runserver command.
+# Initialize App Engine SDK if necessary
+try:
+    from google.appengine.api import apiproxy_stub_map
+except ImportError:
+    from .boot import setup_env
+    setup_env()
+
 from djangoappengine.utils import on_production_server, have_appserver
 
 DEBUG = not on_production_server
 DATABASES = {
     'default': {
         'ENGINE': 'djangoappengine.db',
-        'NAME': '',
-        'USER': '',
-        'PASSWORD': '',
-        'HOST': '',
-        'PORT': '',
-        'SUPPORTS_TRANSACTIONS': False,
+
+        # Other settings which you might want to override in your settings.py
+
+        # Activates high-replication support for remote_api
+        # 'HIGH_REPLICATION': True,
+
+        # Switch to the App Engine for Business domain
+        # 'DOMAIN': 'googleplex.com',
     },
 }
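
Projects usually import these defaults and then override individual keys; the option names match what DatabaseWrapper.__init__ reads (REMOTE, REMOTE_APP_ID, HIGH_REPLICATION, DOMAIN, REMOTE_API_PATH, SECURE_REMOTE_API). A sketch, assuming this module is importable as djangoappengine.settings_base:

    # settings.py (sketch)
    from djangoappengine.settings_base import *

    DATABASES['default'].update({
        'HIGH_REPLICATION': True,        # remote_api app id gets the 's~' prefix
        # 'REMOTE_APP_ID': 'my-app-id',  # defaults to the app id from app.yaml
        # 'DOMAIN': 'googleplex.com',    # App Engine for Business
    })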
 

setup.py

+from setuptools import setup, find_packages
+
+DESCRIPTION = 'App Engine backends for Django-nonrel'
+LONG_DESCRIPTION = None
+try:
+    LONG_DESCRIPTION = open('README.rst').read()
+except:
+    pass
+
+setup(name='djangoappengine',
+      version='1.0',
+      package_dir={'djangoappengine': '.'},
+      packages=['djangoappengine'] + ['djangoappengine.' + name for name in find_packages()],
+      author='Waldemar Kornewald',
+      author_email='wkornewald@gmail.com',
+      url='http://www.allbuttonspressed.com/projects/djangoappengine',
+      description=DESCRIPTION,
+      long_description=LONG_DESCRIPTION,
+      platforms=['any'],
+      classifiers=[
+          'Development Status :: 5 - Production/Stable',
+          'Environment :: Web Environment',
+          'Framework :: Django',
+          'Intended Audience :: Developers',
+          'Operating System :: OS Independent',
+          'Programming Language :: Python',
+          'Topic :: Software Development :: Libraries :: Application Frameworks',
+          'Topic :: Software Development :: Libraries :: Python Modules',
+          'License :: OSI Approved :: BSD License',
+      ],
+)
     def write(self, content):
         raise NotImplementedError()
 
-    def close(self):
-        pass
+    @property
+    def file(self):
+        if not hasattr(self, '_file'):
+            self._file = BlobReader(self.blobstore_info.key())
+        return self._file
 
 class BlobstoreFileUploadHandler(FileUploadHandler):
     """
     """
     def __init__(self, blobinfo, charset):
         super(BlobstoreUploadedFile, self).__init__(
-            blobinfo, blobinfo.filename, blobinfo.content_type, blobinfo.size,
-            charset)
+            BlobReader(blobinfo.key()), blobinfo.filename,
+            blobinfo.content_type, blobinfo.size, charset)
         self.blobstore_info = blobinfo
 
     def open(self, mode=None):
         pass
 
-    def close(self):
-        pass
+    def chunks(self, chunk_size=1024*128):
+        self.file.seek(0)
+        while True:
+            content = self.read(chunk_size)
+            if not content:
+                break
+            yield content
 
-    def chunks(self, chunk_size=None):
-        self.file.seek(0)
-        yield self.read()
-
-    def multiple_chunks(self, chunk_size=None):
-        # Since it's in memory, we'll never have multiple chunks.
-        return False
+    def multiple_chunks(self, chunk_size=1024*128):
+        return True
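
With the blobstore upload handler active for a view, request.FILES entries are BlobstoreUploadedFile instances, so the new chunks() streams data through a BlobReader instead of loading the whole blob into memory. A rough sketch of consuming one (how the handler is installed depends on your project setup):

    from django.http import HttpResponse

    def handle_upload(request):
        uploaded = request.FILES['file']  # a BlobstoreUploadedFile here
        # chunks() reads 128 KB at a time via BlobReader.
        size = sum(len(chunk) for chunk in uploaded.chunks())
        return HttpResponse('Stored %d bytes as blob %s'
                            % (size, uploaded.blobstore_info.key()))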

tests/backend.py

File contents unchanged.

tests/field_options.py

 
         time = datetime.datetime.now().time()
         entity.time = time
-        try:
-            entity.save()
-        except:
-            self.fail()
+        entity.save()
 
-        # check if primary_key=True is set correct for the saved entity
+        # check if primary_key=True is set correctly for the saved entity
         self.assertEquals(entity.pk, u'app-engine@scholardocs.com')
         gae_entity = Get(Key.from_path(FieldsWithOptionsModel._meta.db_table,
             entity.pk))
         self.assertTrue(gae_entity is not None)
         self.assertEquals(gae_entity.key().name(), u'app-engine@scholardocs.com')
         
-        # check if default values are set correct on the db level, primary_key field
-        # is not stored at the db level
+        # check if default values are set correctly on the db level,
+        # primary_key field is not stored at the db level
         for field in FieldsWithOptionsModel._meta.local_fields:
             if field.default and field.default != NOT_PROVIDED and not \
                     field.primary_key:
             elif field.column == 'time':
                 self.assertEquals(gae_entity[field.column], datetime.datetime(
                     1970, 1, 1, time.hour, time.minute, time.second, time.microsecond))
-            elif field.null:
+            elif field.null and field.editable:
                 self.assertEquals(gae_entity[field.column], None)
 
         # check if default values are set correct on the model instance level
                 self.assertEquals(getattr(entity, field.column), field.default)
             elif field.column == 'time':
                 self.assertEquals(getattr(entity, field.column), time)
-            elif field.null:
+            elif field.null and field.editable:
                 self.assertEquals(getattr(entity, field.column), None)
 
         # check if nullable field with default values can be set to None
         # TODO: check db_column option
         # TODO: change the primary key and check if a new instance with the
         # changed primary key will be saved (not in this test class)
-        
-from .testmodels import FieldsWithOptionsModel, EmailModel, DateTimeModel, OrderedModel
-import datetime, time
-from django.test import TestCase
+from ..db.utils import get_cursor, set_cursor
+from .testmodels import FieldsWithOptionsModel, EmailModel, DateTimeModel, \
+    OrderedModel, BlobModel
+from django.db import models
 from django.db.models import Q
 from django.db.utils import DatabaseError
+from django.test import TestCase
+from django.utils import unittest
+from google.appengine.api.datastore import Get, Key
+import datetime
+import time
 
 class FilterTest(TestCase):
     floats = [5.3, 2.6, 9.1, 1.58]
                           email='rinnengan@sage.de').order_by('email')],
                           ['rinnengan@sage.de'])
 
-        # test using exact
-        self.assertEquals(FieldsWithOptionsModel.objects.filter(
-                          email__exact='rinnengan@sage.de')[0].email,
-                          'rinnengan@sage.de')
-
-        self.assertEquals(FieldsWithOptionsModel.objects.filter(
-                           pk='app-engine@scholardocs.com')[0].email,
-                          'app-engine@scholardocs.com')
-
     def test_is_null(self):
         self.assertEquals(FieldsWithOptionsModel.objects.filter(
             floating_point__isnull=True).count(), 0)
                             'email')[::2]],
                           ['app-engine@scholardocs.com', 'rinnengan@sage.de',])
 
+    def test_cursor(self):
+        results = list(FieldsWithOptionsModel.objects.all())
+        cursor = None
+        for item in results:
+            query = FieldsWithOptionsModel.objects.all()[:1]
+            if cursor is not None:
+                query = set_cursor(query, cursor)
+            next = query[0]
+            self.assertEqual(next.pk, item.pk)
+            cursor = get_cursor(query)
+        query = set_cursor(FieldsWithOptionsModel.objects.all(), cursor)
+        self.assertEqual(list(query[:1]), [])
+
     def test_Q_objects(self):
         self.assertEquals([entity.email for entity in
                           FieldsWithOptionsModel.objects.filter(
                                       'rasengan@naruto.com'])],
                           ['app-engine@scholardocs.com', 'rasengan@naruto.com'])
 
+    def test_in_with_order_by(self):
+        class Post(models.Model):
+            writer = models.IntegerField()
+            order = models.IntegerField()
+        Post(writer=1, order=1).save()
+        Post(writer=1, order=2).save()
+        Post(writer=1, order=3).save()
+        Post(writer=2, order=4).save()
+        Post(writer=2, order=5).save()
+        posts = Post.objects.filter(writer__in=[1, 2]).order_by('order')
+        orders = [post.order for post in posts]
+        self.assertEqual(orders, range(1, 6))
+        posts = Post.objects.filter(writer__in=[1, 2]).order_by('-order')
+        orders = [post.order for post in posts]
+        self.assertEqual(orders, range(5, 0, -1))
+
     def test_inequality(self):
         self.assertEquals([entity.email for entity in
                            FieldsWithOptionsModel.objects.exclude(
     def test_latest(self):
         self.assertEquals(FieldsWithOptionsModel.objects.latest('time').floating_point,
                             1.58)
+
+    def test_blob(self):
+        x = BlobModel(data='lalala')
+        x.full_clean()
+        x.save()
+        e = Get(Key.from_path(BlobModel._meta.db_table, x.pk))
+        self.assertEqual(e['data'], x.data)
+        x = BlobModel.objects.all()[0]
+        self.assertEqual(e['data'], x.data)

tests/testmodels.py

 from django.db import models
 from ..db.db_settings import get_indexes
+from djangotoolbox.fields import BlobField
 
 class EmailModel(models.Model):
     email = models.EmailField()
     floating_point = models.FloatField()
     boolean = models.BooleanField()
     null_boolean = models.NullBooleanField()
-    text = models.CharField(max_length=3)
+    text = models.CharField(max_length=32)
     email = models.EmailField()
     comma_seperated_integer = models.CommaSeparatedIntegerField(max_length=10)
     ip_address = models.IPAddressField()
     class Meta:
         ordering = ('-priority',)
 
+class BlobModel(models.Model):
+    data = BlobField()
+
 class DecimalModel(models.Model):
     decimal = models.DecimalField(max_digits=9, decimal_places=2)
 
+from django.conf import settings
+from django.http import HttpResponse
+from django.utils.importlib import import_module
+
+def warmup(request):
+    """
+    Provides default procedure for handling warmup requests on App Engine.
+    Just add this view to your main urls.py.
+    """
+    for app in settings.INSTALLED_APPS:
+        for name in ('urls', 'views'):
+            try:
+                import_module('%s.%s' % (app, name))
+            except ImportError:
+                pass
+    content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
+    return HttpResponse('Warmup done', content_type=content_type)
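
App Engine sends warmup requests to /_ah/warmup, so the view only needs a matching URL pattern. A sketch for a Django 1.2-era urls.py (the dotted path to this view depends on where the module lives; djangoappengine.views is assumed here):

    from django.conf.urls.defaults import patterns, url

    urlpatterns = patterns('',
        # Warmup requests arrive at /_ah/warmup when warmup is enabled in app.yaml.
        url(r'^_ah/warmup$', 'djangoappengine.views.warmup'),
    )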