Commits

Anonymous committed 90be51c

Reorganising south into a subdirectory, as per #88. Compatibility module hack included.

Comments (0)

Files changed (57)

 """
-South - Useable migrations for Django apps
+Temporary South module while we move directory structure.
 """
-
-__version__ = "0.4"
-__authors__ = ["Andrew Godwin <andrew@aeracode.org>", "Andy McCurdy <andy@andymccurdy.com>"]
+import sys, os
+sys.path.insert(0, os.path.dirname(__file__))
+print >> sys.stderr, "Deprecation warning: South has now moved to the south/ subdirectory. You will need to reconfigure your svn:externals or library paths in your application."
+del sys.modules['south']
+import south
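
For projects still importing from the old top-level layout, the shim above keeps those imports resolving against the relocated package. A minimal sketch of what continues to work (nothing new here, just the existing entry points):

    # These imports behave as before, now served from the south/ subdirectory:
    from south.db import db                      # engine-specific DatabaseOperations instance
    from south.models import MigrationHistory    # migration bookkeeping model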

db/__init__.py

-
-# Establish the common DatabaseOperations instance, which we call 'db'.
-# This code is somewhat lifted from Django Evolution.
-from django.conf import settings
-import sys
-module_name = ['south.db', settings.DATABASE_ENGINE]
-try:
-    module = __import__('.'.join(module_name),{},{},[''])
-except ImportError:
-    sys.stderr.write("There is no South database module for the engine '%s'. Please either choose a supported one, or remove South from INSTALLED_APPS.\n" % settings.DATABASE_ENGINE)
-    sys.exit(1)
-db = module.DatabaseOperations()
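
The removed __init__.py above (now living under south/db/) picks a backend module by name from settings.DATABASE_ENGINE. An illustrative mapping, assuming the engines shipped in this changeset:

    # settings.DATABASE_ENGINE = 'postgresql_psycopg2' -> south.db.postgresql_psycopg2
    # settings.DATABASE_ENGINE = 'mysql'               -> south.db.mysql
    # settings.DATABASE_ENGINE = 'sqlite3'             -> south.db.sqlite3
    from south.db import db   # 'db' is that backend's DatabaseOperations() instance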

db/generic.py

-
-import datetime
-import re
-
-from django.core.management.color import no_style
-from django.db import connection, transaction, models
-from django.db.backends.util import truncate_name
-from django.db.models.fields import NOT_PROVIDED
-from django.dispatch import dispatcher
-from django.conf import settings
-
-
-def alias(attrname):
-    """
-    Returns a function which calls 'attrname' - for function aliasing.
-    We can't just use foo = bar, as this breaks subclassing.
-    """
-    def func(self, *args, **kwds):
-        return getattr(self, attrname)(*args, **kwds)
-    return func
-
-
-class DatabaseOperations(object):
-
-    """
-    Generic SQL implementation of the DatabaseOperations.
-    Some of this code comes from Django Evolution.
-    """
-
-    # We assume the generic DB can handle DDL transactions. MySQL will change this.
-    has_ddl_transactions = True
-
-    def __init__(self):
-        self.debug = False
-        self.deferred_sql = []
-        self.dry_run = False
-        self.pending_create_signals = []
-
-    def execute(self, sql, params=[]):
-        """
-        Executes the given SQL statement, with optional parameters.
-        If the instance's debug attribute is True, prints out what it executes.
-        """
-        cursor = connection.cursor()
-        if self.debug:
-            print "   = %s" % sql, params
-
-        if self.dry_run:
-            return []
-
-        cursor.execute(sql, params)
-        try:
-            return cursor.fetchall()
-        except:
-            return []
-    
-    
-    def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"):
-        """
-        Takes a block of SQL and executes it as multiple separate statements.
-        (Some backends, such as Postgres, don't work otherwise.)
-        """
-        # Be warned: This function is full of dark magic. Make sure you really
-        # know regexes before trying to edit it.
-        # First, strip comments
-        sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()])
-        # Now execute each statement
-        for st in re.split(regex, sql)[1:][::2]:
-            self.execute(st)
-
-    def add_deferred_sql(self, sql):
-        """
-        Add a SQL statement to the deferred list, that won't be executed until
-        this instance's execute_deferred_sql method is run.
-        """
-        self.deferred_sql.append(sql)
-
-
-    def execute_deferred_sql(self):
-        """
-        Executes all deferred SQL, resetting the deferred_sql list
-        """
-        for sql in self.deferred_sql:
-            self.execute(sql)
-
-        self.deferred_sql = []
-
-
-    def clear_deferred_sql(self):
-        """
-        Resets the deferred_sql list to empty.
-        """
-        self.deferred_sql = []
-    
-    
-    def clear_run_data(self, pending_creates = None):
-        """
-        Resets variables to how they should be before a run. Used for dry runs.
-        If you want, pass in an old pending_creates to reset to.
-        """
-        self.clear_deferred_sql()
-        self.pending_create_signals = pending_creates or []
-    
-    
-    def get_pending_creates(self):
-        return self.pending_create_signals
-
-
-    def create_table(self, table_name, fields):
-        """
-        Creates the table 'table_name'. 'fields' is a tuple of fields,
-        each represented by a 2-part tuple of field name and a
-        django.db.models.fields.Field object
-        """
-        qn = connection.ops.quote_name
-
-        # allow fields to be a dictionary
-        # removed for now - philosophical reasons (this is almost certainly not what you want)
-        #try:
-        #    fields = fields.items()
-        #except AttributeError:
-        #    pass
-
-        columns = [
-            self.column_sql(table_name, field_name, field)
-            for field_name, field in fields
-        ]
-
-        self.execute('CREATE TABLE %s (%s);' % (qn(table_name), ', '.join([col for col in columns if col])))
-
-    add_table = alias('create_table') # Alias for consistency's sake
-
-
-    def rename_table(self, old_table_name, table_name):
-        """
-        Renames the table 'old_table_name' to 'table_name'.
-        """
-        if old_table_name == table_name:
-            # No Operation
-            return
-        qn = connection.ops.quote_name
-        params = (qn(old_table_name), qn(table_name))
-        self.execute('ALTER TABLE %s RENAME TO %s;' % params)
-
-
-    def delete_table(self, table_name, cascade=True):
-        """
-        Deletes the table 'table_name'.
-        """
-        qn = connection.ops.quote_name
-        params = (qn(table_name), )
-        if cascade:
-            self.execute('DROP TABLE %s CASCADE;' % params)
-        else:
-            self.execute('DROP TABLE %s;' % params)
-
-    drop_table = alias('delete_table')
-
-
-    def clear_table(self, table_name):
-        """
-        Deletes all rows from 'table_name'.
-        """
-        qn = connection.ops.quote_name
-        params = (qn(table_name), )
-        self.execute('DELETE FROM %s;' % params)
-
-    add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
-
-    def add_column(self, table_name, name, field, keep_default=True):
-        """
-        Adds the column 'name' to the table 'table_name'.
-        Uses the 'field' parameter, a django.db.models.fields.Field instance,
-        to generate the necessary SQL
-
-        @param table_name: The name of the table to add the column to
-        @param name: The name of the column to add
-        @param field: The field to use
-        """
-        qn = connection.ops.quote_name
-        sql = self.column_sql(table_name, name, field)
-        if sql:
-            params = (
-                qn(table_name),
-                sql,
-            )
-            sql = self.add_column_string % params
-            self.execute(sql)
-
-            # Now, drop the default if we need to
-            if not keep_default and field.default:
-                field.default = NOT_PROVIDED
-                self.alter_column(table_name, name, field, explicit_name=False)
-
-    alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
-    alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
-    alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
-    allows_combined_alters = True
-
-    def alter_column(self, table_name, name, field, explicit_name=True):
-        """
-        Alters the given column name so it will match the given field.
-        Note that conversion between the two by the database must be possible.
-        Will not automatically add _id by default; to have this behaviour, pass
-        explicit_name=False.
-
-        @param table_name: The name of the table to add the column to
-        @param name: The name of the column to alter
-        @param field: The new field definition to use
-        """
-
-        # hook for the field to do any resolution prior to its attributes being queried
-        if hasattr(field, 'south_init'):
-            field.south_init()
-
-        qn = connection.ops.quote_name
-        
-        # Add _id or whatever if we need to
-        if not explicit_name:
-            field.set_attributes_from_name(name)
-            name = field.column
-
-        # First, change the type
-        params = {
-            "column": qn(name),
-            "type": field.db_type(),
-        }
-
-        # SQLs is a list of (SQL, values) pairs.
-        sqls = [(self.alter_string_set_type % params, [])]
-
-        # Next, set any default
-        if not field.null and field.has_default():
-            default = field.get_default()
-            sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (qn(name),), [default]))
-        else:
-            sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (qn(name),), []))
-
-
-        # Next, nullity
-        params = {
-            "column": qn(name),
-            "type": field.db_type(),
-        }
-        if field.null:
-            sqls.append((self.alter_string_set_null % params, []))
-        else:
-            sqls.append((self.alter_string_drop_null % params, []))
-
-
-        # TODO: Unique
-
-        if self.allows_combined_alters:
-            sqls, values = zip(*sqls)
-            self.execute(
-                "ALTER TABLE %s %s;" % (qn(table_name), ", ".join(sqls)),
-                flatten(values),
-            )
-        else:
-            # Some databases (e.g. MySQL) don't like more than one ALTER clause at once.
-            for sql, values in sqls:
-                self.execute("ALTER TABLE %s %s;" % (qn(table_name), sql), values)
-
-
-    def column_sql(self, table_name, field_name, field, tablespace=''):
-        """
-        Creates the SQL snippet for a column. Used by add_column and add_table.
-        """
-        qn = connection.ops.quote_name
-
-        field.set_attributes_from_name(field_name)
-
-        # hook for the field to do any resolution prior to its attributes being queried
-        if hasattr(field, 'south_init'):
-            field.south_init()
-
-        sql = field.db_type()
-        if sql:        
-            field_output = [qn(field.column), sql]
-            field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
-            if field.primary_key:
-                field_output.append('PRIMARY KEY')
-            elif field.unique:
-                # Instead of using UNIQUE, add a unique index with a predictable name
-                self.add_deferred_sql(
-                    self.create_index_sql(
-                        table_name,
-                        [field.column],
-                        unique = True,
-                        db_tablespace = tablespace,
-                    )
-                )
-
-            tablespace = field.db_tablespace or tablespace
-            if tablespace and connection.features.supports_tablespaces and field.unique:
-                # We must specify the index tablespace inline, because we
-                # won't be generating a CREATE INDEX statement for this field.
-                field_output.append(connection.ops.tablespace_sql(tablespace, inline=True))
-
-            sql = ' '.join(field_output)
-            sqlparams = ()
-            # if the field is "NOT NULL" and a default value is provided, create the column with it
-            # this allows the addition of a NOT NULL field to a table with existing rows
-            if not field.null and field.has_default():
-                default = field.get_default()
-                # If the default is actually None, don't add a default term
-                if default is not None:
-                    # If the default is a callable, then call it!
-                    if callable(default):
-                        default = default()
-                    # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
-                    if isinstance(default, basestring):
-                        default = "'%s'" % default.replace("'", "''")
-                    elif isinstance(default, datetime.date):
-                        default = "'%s'" % default
-                    sql += " DEFAULT %s"
-                    sqlparams = (default,)
-
-            if field.rel and self.supports_foreign_keys:
-                self.add_deferred_sql(
-                    self.foreign_key_sql(
-                        table_name,
-                        field.column,
-                        field.rel.to._meta.db_table,
-                        field.rel.to._meta.get_field(field.rel.field_name).column
-                    )
-                )
-
-            if field.db_index and not field.unique:
-                self.add_deferred_sql(self.create_index_sql(table_name, [field.column]))
-
-        if hasattr(field, 'post_create_sql'):
-            style = no_style()
-            for stmt in field.post_create_sql(style, table_name):
-                self.add_deferred_sql(stmt)
-
-        if sql:
-            return sql % sqlparams
-        else:
-            return None
-
-
-    supports_foreign_keys = True
-
-    def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name):
-        """
-        Generates a full SQL statement to add a foreign key constraint
-        """
-        qn = connection.ops.quote_name
-        constraint_name = '%s_refs_%s_%x' % (from_column_name, to_column_name, abs(hash((from_table_name, to_table_name))))
-        return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % (
-            qn(from_table_name),
-            qn(truncate_name(constraint_name, connection.ops.max_name_length())),
-            qn(from_column_name),
-            qn(to_table_name),
-            qn(to_column_name),
-            connection.ops.deferrable_sql() # Django knows this
-        )
-
-
-    max_index_name_length = 63
-    
-    def create_index_name(self, table_name, column_names):
-        """
-        Generate a unique name for the index
-        """
-        index_unique_name = ''
-        if len(column_names) > 1:
-            index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
-        
-        return ('%s_%s%s' % (table_name, column_names[0], index_unique_name))[:self.max_index_name_length]
-
-
-    def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''):
-        """
-        Generates a create index statement on 'table_name' for a list of 'column_names'
-        """
-        qn = connection.ops.quote_name
-        if not column_names:
-            print "No column names supplied on which to create an index"
-            return ''
-
-        if db_tablespace and connection.features.supports_tablespaces:
-            tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace)
-        else:
-            tablespace_sql = ''
-
-        index_name = self.create_index_name(table_name, column_names)
-        qn = connection.ops.quote_name
-        return 'CREATE %sINDEX %s ON %s (%s)%s;' % (
-            unique and 'UNIQUE ' or '',
-            qn(index_name),
-            qn(table_name),
-            ','.join([qn(field) for field in column_names]),
-            tablespace_sql
-        )
-
-    def create_index(self, table_name, column_names, unique=False, db_tablespace=''):
-        """ Executes a create index statement """
-        sql = self.create_index_sql(table_name, column_names, unique, db_tablespace)
-        self.execute(sql)
-
-
-    drop_index_string = 'DROP INDEX %(index_name)s'
-
-    def delete_index(self, table_name, column_names, db_tablespace=''):
-        """
-        Deletes an index created with create_index.
-        This is possible using only columns due to the deterministic
-        index naming function which relies on column names.
-        """
-        if isinstance(column_names, (str, unicode)):
-            column_names = [column_names]
-        name = self.create_index_name(table_name, column_names)
-        qn = connection.ops.quote_name
-        sql = self.drop_index_string % {"index_name": qn(name), "table_name": qn(table_name)}
-        self.execute(sql)
-
-    drop_index = alias('delete_index')
-
-    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
-
-    def delete_column(self, table_name, name):
-        """
-        Deletes the column 'name' from the table 'table_name'.
-        """
-        qn = connection.ops.quote_name
-        params = (qn(table_name), qn(name))
-        self.execute(self.delete_column_string % params, [])
-
-    drop_column = alias('delete_column')
-
-
-    def rename_column(self, table_name, old, new):
-        """
-        Renames the column 'old' from the table 'table_name' to 'new'.
-        """
-        raise NotImplementedError("rename_column has no generic SQL syntax")
-
-    
-    drop_primary_key_string = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
-    
-    def drop_primary_key(self, table_name):
-        """
-        Drops the old primary key.
-        """
-        qn = connection.ops.quote_name
-        self.execute(self.drop_primary_key_string % {
-            "table": qn(table_name),
-            "constraint": qn(table_name+"_pkey"),
-        })
-
-    delete_primary_key = alias('drop_primary_key')
-
-
-    create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
-    
-    def create_primary_key(self, table_name, columns):
-        """
-        Creates a new primary key on the specified columns.
-        """
-        if not isinstance(columns, (list, tuple)):
-            columns = [columns]
-        qn = connection.ops.quote_name
-        self.execute(self.create_primary_key_string % {
-            "table": qn(table_name),
-            "constraint": qn(table_name+"_pkey"),
-            "columns": ", ".join(map(qn, columns)),
-        })
-    
-    
-    def start_transaction(self):
-        """
-        Makes sure the following commands are inside a transaction.
-        Must be followed by a (commit|rollback)_transaction call.
-        """
-        if self.dry_run:
-            return
-        transaction.commit_unless_managed()
-        transaction.enter_transaction_management()
-        transaction.managed(True)
-
-
-    def commit_transaction(self):
-        """
-        Commits the current transaction.
-        Must be preceded by a start_transaction call.
-        """
-        if self.dry_run:
-            return
-        transaction.commit()
-        transaction.leave_transaction_management()
-
-
-    def rollback_transaction(self):
-        """
-        Rolls back the current transaction.
-        Must be preceded by a start_transaction call.
-        """
-        if self.dry_run:
-            return
-        transaction.rollback()
-        transaction.leave_transaction_management()
-
-
-    def send_create_signal(self, app_label, model_names):
-        self.pending_create_signals.append((app_label, model_names))
-
-
-    def send_pending_create_signals(self):
-        for (app_label, model_names) in self.pending_create_signals:
-            self.really_send_create_signal(app_label, model_names)
-        self.pending_create_signals = []
-
-
-    def really_send_create_signal(self, app_label, model_names):
-        """
-        Sends a post_syncdb signal for the model specified.
-
-        If the model is not found (perhaps it's been deleted?),
-        no signal is sent.
-
-        TODO: The behavior of django.contrib.* apps seems flawed in that
-        they don't respect created_models.  Rather, they blindly execute
-        over all models within the app sending the signal.  This is a
-        patch we should push Django to make. For now, this should work.
-        """
-        if self.debug:
-            print " - Sending post_syncdb signal for %s: %s" % (app_label, model_names)
-        app = models.get_app(app_label)
-        if not app:
-            return
-
-        created_models = []
-        for model_name in model_names:
-            model = models.get_model(app_label, model_name)
-            if model:
-                created_models.append(model)
-
-        if created_models:
-            # syncdb defaults -- perhaps take these as options?
-            verbosity = 1
-            interactive = True
-
-            if hasattr(dispatcher, "send"):
-                dispatcher.send(signal=models.signals.post_syncdb, sender=app,
-                                app=app, created_models=created_models,
-                                verbosity=verbosity, interactive=interactive)
-            else:
-                models.signals.post_syncdb.send(sender=app,
-                                                app=app, created_models=created_models,
-                                                verbosity=verbosity, interactive=interactive)
-
-    def mock_model(self, model_name, db_table, db_tablespace='', 
-                   pk_field_name='id', pk_field_type=models.AutoField,
-                   pk_field_args=[], pk_field_kwargs={}):
-        """
-        Generates a MockModel class that provides enough information
-        to be used by a foreign key/many-to-many relationship.
-
-        Migrations should prefer to use these rather than actual models
-        as models could get deleted over time, but these can remain in
-        migration files forever.
-        """
-        class MockOptions(object):
-            def __init__(self):
-                self.db_table = db_table
-                self.db_tablespace = db_tablespace or settings.DEFAULT_TABLESPACE
-                self.object_name = model_name
-                self.module_name = model_name.lower()
-
-                if pk_field_type == models.AutoField:
-                    pk_field_kwargs['primary_key'] = True
-
-                self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs)
-                self.pk.set_attributes_from_name(pk_field_name)
-                self.abstract = False
-
-            def get_field_by_name(self, field_name):
-                # we only care about the pk field
-                return (self.pk, self.model, True, False)
-
-            def get_field(self, name):
-                # we only care about the pk field
-                return self.pk
-
-        class MockModel(object):
-            _meta = None
-
-        # We need to return an actual class object here, not an instance
-        MockModel._meta = MockOptions()
-        MockModel._meta.model = MockModel
-        return MockModel
-
-# Single-level flattening of lists
-def flatten(ls):
-    nl = []
-    for l in ls:
-        nl += l
-    return nl
-

db/mysql.py

-
-from django.db import connection
-from django.conf import settings
-from south.db import generic
-
-class DatabaseOperations(generic.DatabaseOperations):
-
-    """
-    MySQL implementation of database operations.
-    """
-    
-    alter_string_set_type = ''
-    alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
-    alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
-    drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
-    drop_primary_key_string = "ALTER TABLE %(table)s DROP PRIMARY KEY"
-    allows_combined_alters = False
-    has_ddl_transactions = False
-    
-    def execute(self, sql, params=[]):
-        if hasattr(settings, "DATABASE_STORAGE_ENGINE") and \
-           settings.DATABASE_STORAGE_ENGINE:
-            generic.DatabaseOperations.execute(self, "SET storage_engine=%s;" %
-                settings.DATABASE_STORAGE_ENGINE)
-        return generic.DatabaseOperations.execute(self, sql, params)
-    execute.__doc__ = generic.DatabaseOperations.execute.__doc__
-
-    def rename_column(self, table_name, old, new):
-        if old == new or self.dry_run:
-            return []
-        
-        qn = connection.ops.quote_name
-        
-        rows = [x for x in self.execute('DESCRIBE %s' % (qn(table_name),)) if x[0] == old]
-        
-        if not rows:
-            raise ValueError("No column '%s' in '%s'." % (old, table_name))
-        
-        params = (
-            qn(table_name),
-            qn(old),
-            qn(new),
-            rows[0][1],
-            rows[0][2] == "YES" and "NULL" or "NOT NULL",
-            rows[0][3] == "PRI" and "PRIMARY KEY" or "",
-            rows[0][4] and "DEFAULT " or "",
-            rows[0][4] and "%s" or "",
-            rows[0][5] or "",
-        )
-        
-        sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s %s;' % params
-        
-        if rows[0][4]:
-            self.execute(sql, (rows[0][4],))
-        else:
-            self.execute(sql)
-    
-    def delete_column(self, table_name, name):
-        qn = connection.ops.quote_name
-        db_name = settings.DATABASE_NAME
-        
-        # See if there is a foreign key on this column
-        cursor = connection.cursor()
-        get_fkeyname_query = "SELECT tc.constraint_name FROM \
-                              information_schema.table_constraints tc, \
-                              information_schema.key_column_usage kcu \
-                              WHERE tc.table_name=kcu.table_name \
-                              AND tc.table_schema=kcu.table_schema \
-                              AND tc.constraint_name=kcu.constraint_name \
-                              AND tc.constraint_type='FOREIGN KEY' \
-                              AND tc.table_schema='%s' \
-                              AND tc.table_name='%s' \
-                              AND kcu.column_name='%s'"
-
-        result = cursor.execute(get_fkeyname_query % (db_name, table_name, name))
-        
-        # if a foreign key exists, we need to delete it first
-        if result > 0:
-            assert result == 1 #we should only have one result
-            fkey_name = cursor.fetchone()[0]
-            drop_query = "ALTER TABLE %s DROP FOREIGN KEY %s"
-            cursor.execute(drop_query % (qn(table_name), qn(fkey_name)))
-
-        super(DatabaseOperations, self).delete_column(table_name, name)
-
-    
-    def rename_table(self, old_table_name, table_name):
-        """
-        Renames the table 'old_table_name' to 'table_name'.
-        """
-        if old_table_name == table_name:
-            # No Operation
-            return
-        qn = connection.ops.quote_name
-        params = (qn(old_table_name), qn(table_name))
-        self.execute('RENAME TABLE %s TO %s;' % params)
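
The generic backend raises NotImplementedError for rename_column, so the DESCRIBE/CHANGE COLUMN override above is what makes a rename work on MySQL. A hedged one-liner (table and column names are made up):

    from south.db import db
    db.rename_column('books_book', 'titel', 'title')   # emits ALTER TABLE ... CHANGE COLUMN ...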

db/postgresql_psycopg2.py

-
-from django.db import connection
-from south.db import generic
-
-class DatabaseOperations(generic.DatabaseOperations):
-
-    """
-    PsycoPG2 implementation of database operations.
-    """
-
-    def rename_column(self, table_name, old, new):
-        if old == new:
-            return []
-        qn = connection.ops.quote_name
-        params = (qn(table_name), qn(old), qn(new))
-        self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % params)
-    
-    def rename_table(self, old_table_name, table_name):
-        "will rename the table and an associated ID sequence and primary key index"
-        # First, rename the table
-        generic.DatabaseOperations.rename_table(self, old_table_name, table_name)
-        # Then, try renaming the ID sequence
-        # (if you're using other AutoFields... your problem, unfortunately)
-        self.commit_transaction()
-        self.start_transaction()
-        try:
-            generic.DatabaseOperations.rename_table(self, old_table_name+"_id_seq", table_name+"_id_seq")
-        except:
-            if self.debug:
-                print "   ~ No such sequence (ignoring error)"
-            self.rollback_transaction()
-        else:
-            self.commit_transaction()
-        self.start_transaction()
-
-        # Rename the primary key index. This will not rename other indices on
-        # the table that are used by Django (e.g. foreign keys); until we
-        # figure out how, you need to do this yourself.
-        try:
-            generic.DatabaseOperations.rename_table(self, old_table_name+"_pkey", table_name+ "_pkey")
-        except:
-            if self.debug:
-                print "   ~ No such primary key (ignoring error)"
-            self.rollback_transaction()
-        else:
-            self.commit_transaction()
-        self.start_transaction()
-
-
-    def rename_index(self, old_index_name, index_name):
-        "Rename an index individually"
-        generic.DatabaseOperations.rename_table(self, old_index_name, index_name)
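
On PostgreSQL, rename_table above also tries to rename the implicit <table>_id_seq sequence and <table>_pkey index, so one call covers all three objects. An illustrative example (table names invented):

    from south.db import db
    # Renames books_oldbook, plus books_oldbook_id_seq and books_oldbook_pkey, in one go
    db.rename_table('books_oldbook', 'books_book')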

db/sql_server/__init__.py

Empty file removed.

db/sql_server/pyodbc.py

-from django.db import connection
-from django.db.models.fields import *
-from south.db import generic
-
-class DatabaseOperations(generic.DatabaseOperations):
-    """
-    django-pyodbc (sql_server.pyodbc) implementation of database operations.
-    """
-    
-    add_column_string = 'ALTER TABLE %s ADD %s;'
-    alter_string_set_type = 'ALTER COLUMN %(column)s %(type)s'
-    allows_combined_alters = False
-    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;'
-
-    def create_table(self, table_name, fields):
-        # Tweak stuff as needed
-        for name,f in fields:
-            if isinstance(f, BooleanField):
-                if f.default == True:
-                    f.default = 1
-                if f.default == False:
-                    f.default = 0
-
-        # Run
-        generic.DatabaseOperations.create_table(self, table_name, fields)

db/sqlite3.py

-
-from django.db import connection
-from south.db import generic
-
-class DatabaseOperations(generic.DatabaseOperations):
-
-    """
-    SQLite3 implementation of database operations.
-    """
-
-    # SQLite ignores foreign key constraints. I wish I could.
-    supports_foreign_keys = False
-    
-    # You can't add UNIQUE columns with an ALTER TABLE.
-    def add_column(self, table_name, name, field, *args, **kwds):
-        # Run ALTER TABLE with no unique column
-        unique, field._unique, field.db_index = field.unique, False, False
-        generic.DatabaseOperations.add_column(self, table_name, name, field, *args, **kwds)
-        # If it _was_ unique, make an index on it.
-        if unique:
-            self.create_index(table_name, [name], unique=True)
-    
-    # SQLite doesn't have ALTER COLUMN
-    def alter_column(self, table_name, name, field, explicit_name=True):
-        """
-        Not supported under SQLite.
-        """
-        raise NotImplementedError("SQLite does not support altering columns.")
-    
-    # Nor DROP COLUMN
-    def delete_column(self, table_name, name):
-        """
-        Not supported under SQLite.
-        """
-        raise NotImplementedError("SQLite does not support deleting columns.")
-    
-    # Nor RENAME COLUMN
-    def rename_column(self, table_name, old, new):
-        """
-        Not supported under SQLite.
-        """
-        raise NotImplementedError("SQLite does not support renaming columns.")
-    
-    # No cascades on deletes
-    def delete_table(self, table_name, cascade=True):
-        generic.DatabaseOperations.delete_table(self, table_name, False)
-    
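
Because SQLite cannot add a UNIQUE column via ALTER TABLE, the add_column override above strips the constraint and recreates it as a unique index. A small hypothetical example:

    from django.db import models
    from south.db import db
    # The UNIQUE constraint is dropped from the ALTER TABLE and re-added as a unique index
    db.add_column('books_book', 'slug', models.SlugField(default='', unique=True))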

management/__init__.py

Empty file removed.

management/commands/__init__.py

Empty file removed.

management/commands/migrate.py

-from django.core.management.base import BaseCommand
-from django.core.management.color import no_style
-from django.conf import settings
-from django.db import models
-from optparse import make_option
-from south import migration
-import sys
-
-class Command(BaseCommand):
-    option_list = BaseCommand.option_list + (
-        make_option('--list', action='store_true', dest='list', default=False,
-            help='List migrations noting those that have been applied'),
-        make_option('--skip', action='store_true', dest='skip', default=False,
-            help='Will skip over out-of-order missing migrations'),
-        make_option('--merge', action='store_true', dest='merge', default=False,
-            help='Will run out-of-order missing migrations as they are - no rollbacks.'),
-        make_option('--only', action='store_true', dest='only', default=False,
-            help='Only runs or rolls back the migration specified, and none around it.'),
-        make_option('--fake', action='store_true', dest='fake', default=False,
-            help="Pretends to do the migrations, but doesn't actually execute them."),
-        make_option('--db-dry-run', action='store_true', dest='db_dry_run', default=False,
-            help="Doesn't execute the SQL generated by the db methods, and doesn't store a record that the migration(s) occurred. Useful to test migrations before applying them."),
-    )
-    if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
-        option_list += (
-            make_option('--verbosity', action='store', dest='verbosity', default='1',
-            type='choice', choices=['0', '1', '2'],
-            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
-        )
-    help = "Runs migrations for all apps."
-
-    def handle(self, app=None, target=None, skip=False, merge=False, only=False, backwards=False, fake=False, db_dry_run=False, list=False, **options):
-
-        # Work out what the resolve mode is
-        resolve_mode = merge and "merge" or (skip and "skip" or None)
-        # Turn on db debugging
-        from south.db import db
-        db.debug = True
-        
-        # NOTE: THIS IS DUPLICATED FROM django.core.management.commands.syncdb
-        # This code imports any module named 'management' in INSTALLED_APPS.
-        # The 'management' module is the preferred way of listening to post_syncdb
-        # signals, and since we're sending those out with create_table migrations,
-        # we need apps to behave correctly.
-        for app_name in settings.INSTALLED_APPS:
-            try:
-                __import__(app_name + '.management', {}, {}, [''])
-            except ImportError, exc:
-                msg = exc.args[0]
-                if not msg.startswith('No module named') or 'management' not in msg:
-                    raise
-        # END DJANGO DUPE CODE
-        
-        # Migrate each app
-        if app:
-            apps = [migration.get_app(app)]
-        else:
-            apps = migration.get_migrated_apps()
-        silent = options.get('verbosity', 0) == 0
-        
-        if list and apps:
-            list_migrations(apps)
-        
-        if not list:
-            for app in apps:
-                result = migration.migrate_app(
-                    app,
-                    resolve_mode = resolve_mode,
-                    target_name = target,
-                    fake = fake,
-                    db_dry_run = db_dry_run,
-                    silent = silent,
-                    load_inital_data = True,
-                )
-                if result is False:
-                    return
-
-
-def list_migrations(apps):
-    from south.models import MigrationHistory
-    apps = list(apps)
-    names = [migration.get_app_name(app) for app in apps]
-    applied_migrations = MigrationHistory.objects.filter(app_name__in=names)
-    applied_migrations = ['%s.%s' % (mi.app_name,mi.migration) for mi in applied_migrations]
-
-    print
-    for app in apps:
-        print migration.get_app_name(app)
-        all_migrations = migration.get_migration_names(app)
-        for migration_name in all_migrations:
-            long_form = '%s.%s' % (migration.get_app_name(app),migration_name)
-            if long_form in applied_migrations:
-                print format_migration_list_item(migration_name)
-            else:
-                print format_migration_list_item(migration_name, applied=False)
-        print
-
-
-def format_migration_list_item(name, applied=True):
-    if applied:
-        return '   * %s' % name
-    return '     %s' % name
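
The handle() options above map one-to-one onto command-line flags. A hedged sketch of invoking the command programmatically (app and migration names are invented):

    from django.core.management import call_command

    # Equivalent to: ./manage.py migrate books 0002_add_isbn --db-dry-run
    call_command('migrate', 'books', '0002_add_isbn', db_dry_run=True)

    # Equivalent to: ./manage.py migrate --list
    call_command('migrate', list=True)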

management/commands/startmigration.py

-"""
-Startmigration command, version 2.
-"""
-
-import sys
-import os
-import re
-import string
-import random
-import inspect
-import parser
-from optparse import make_option
-
-from django.core.management.base import BaseCommand
-from django.core.management.color import no_style
-from django.db import models
-from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
-from django.contrib.contenttypes.generic import GenericRelation
-from django.db.models.fields import FieldDoesNotExist
-from django.conf import settings
-
-try:
-    set
-except NameError:
-    from sets import Set as set
-
-from south import migration, modelsparser
-
-
-class Command(BaseCommand):
-    option_list = BaseCommand.option_list + (
-        make_option('--model', action='append', dest='added_model_list', type='string',
-            help='Generate a Create Table migration for the specified model.  Add multiple models to this migration with subsequent --model parameters.'),
-        make_option('--add-field', action='append', dest='added_field_list', type='string',
-            help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
-        make_option('--initial', action='store_true', dest='initial', default=False,
-            help='Generate the initial schema for the app.'),
-        make_option('--auto', action='store_true', dest='auto', default=False,
-            help='Attempt to automatically detect differences from the last migration.'),
-        make_option('--freeze', action='append', dest='freeze_list', type='string',
-            help='Freeze the specified model(s). Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'),
-    )
-    help = "Creates a new template migration for the given app"
-    
-    def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, **options):
-        
-        # Any supposed lists that are None become empty lists
-        added_model_list = added_model_list or []
-        added_field_list = added_field_list or []
-        
-        # Make sure options are compatible
-        if initial and (added_model_list or added_field_list or auto):
-            print "You cannot use --initial and other options together"
-            return
-        if auto and (added_model_list or added_field_list or initial):
-            print "You cannot use --auto and other options together"
-            return
-        
-        # specify the default name 'initial' if a name wasn't specified and we're
-        # doing a migration for an entire app
-        if not name and initial:
-            name = 'initial'
-        
-        # if not name, there's an error
-        if not name:
-            print "You must name this migration"
-            return
-        
-        if not app:
-            print "Please provide an app in which to create the migration."
-            return
-        
-        # Make sure the app is short form
-        app = app.split(".")[-1]
-        
-        # See if the app exists
-        app_models_module = models.get_app(app)
-        if not app_models_module:
-            print "App '%s' doesn't seem to exist, isn't in INSTALLED_APPS, or has no models." % app
-            return
-        
-        # If they've set SOUTH_AUTO_FREEZE_APP = True, then add this app to freeze_list
-        if hasattr(settings, 'SOUTH_AUTO_FREEZE_APP') and settings.SOUTH_AUTO_FREEZE_APP:
-            if freeze_list and app not in freeze_list:
-                freeze_list += [app]
-            else:
-                freeze_list = [app]
-        
-        # Make the migrations directory if it's not there
-        app_module_path = app_models_module.__name__.split('.')[0:-1]
-        try:
-            app_module = __import__('.'.join(app_module_path), {}, {}, [''])
-        except ImportError:
-            print "Couldn't find path to App '%s'." % app
-            return
-            
-        migrations_dir = os.path.join(
-            os.path.dirname(app_module.__file__),
-            "migrations",
-        )
-        
-        # Make sure there's a migrations directory and __init__.py
-        if not os.path.isdir(migrations_dir):
-            print "Creating migrations directory at '%s'..." % migrations_dir
-            os.mkdir(migrations_dir)
-        init_path = os.path.join(migrations_dir, "__init__.py")
-        if not os.path.isfile(init_path):
-            # Touch the init py file
-            print "Creating __init__.py in '%s'..." % migrations_dir
-            open(init_path, "w").close()
-        
-        # See what filename is next in line. We assume they use numbers.
-        migrations = migration.get_migration_names(migration.get_app(app))
-        highest_number = 0
-        for migration_name in migrations:
-            try:
-                number = int(migration_name.split("_")[0])
-                highest_number = max(highest_number, number)
-            except ValueError:
-                pass
-        
-        # Make the new filename
-        new_filename = "%04i%s_%s.py" % (
-            highest_number + 1,
-            "".join([random.choice(string.letters.lower()) for i in range(0)]), # Possible random stuff insertion
-            name,
-        )
-        
-        # Find the source file encoding, using PEP 0263's method
-        encoding = None
-        first_two_lines = inspect.getsourcelines(app_models_module)[0][:2]
-        for line in first_two_lines:
-            if re.search("coding[:=]\s*([-\w.]+)", line):
-                encoding = line
-        
-        # Initialise forwards, backwards and models to blank things
-        forwards = ""
-        backwards = ""
-        frozen_models = {} # Frozen models, used by the Fake ORM
-        stub_models = {} # Frozen models, but only enough for relation ends (old mock models)
-        complete_apps = set() # Apps that are completely frozen - useable for diffing.
-        
-        # Sets of actions
-        added_models = set()
-        deleted_models = [] # Special: contains instances _not_ string keys
-        added_fields = set()
-        deleted_fields = [] # Similar to deleted_models
-        changed_fields = [] # (mkey, fname, old_def, new_def)
-        
-        # --initial means 'add all models in this app'.
-        if initial:
-            for model in models.get_models(app_models_module):
-                added_models.add("%s.%s" % (app, model._meta.object_name))
-        
-        # Added models might be 'model' or 'app.model'.
-        for modelname in added_model_list:
-            if "." in modelname:
-                added_models.add(modelname)
-            else:
-                added_models.add("%s.%s" % (app, modelname))
-        
-        # Fields need translating from "model.field" to (app.model, field)
-        for fielddef in added_field_list:
-            try:
-                modelname, fieldname = fielddef.split(".", 1)
-            except ValueError:
-                print "The field specification '%s' is not in modelname.fieldname format." % fielddef
-            else:
-                added_fields.add(("%s.%s" % (app, modelname), fieldname))
-        
-        # Add anything frozen (I almost called the dict Iceland...)
-        if freeze_list:
-            for item in freeze_list:
-                if "." in item:
-                    # It's a specific model
-                    app_name, model_name = item.split(".", 1)
-                    model = models.get_model(app_name, model_name)
-                    if model is None:
-                        print "Cannot find the model '%s' to freeze it." % item
-                        return
-                    frozen_models[model] = None
-                else:
-                    # Get everything in an app!
-                    frozen_models.update(dict([(x, None) for x in models.get_models(models.get_app(item))]))
-                    complete_apps.add(item.split(".")[-1])
-            # For every model in the freeze list, add in dependency stubs
-            for model in frozen_models:
-                stub_models.update(model_dependencies(model))
-        
-        
-        ### Automatic Detection ###
-        if auto:
-            # Get the last migration for this app
-            last_models = None
-            app_module = migration.get_app(app)
-            if app_module is None:
-                print "You cannot use automatic detection on the first migration of an app. Try --initial instead."
-            else:
-                migrations = list(migration.get_migration_classes(app_module))
-                if not migrations:
-                    print "You cannot use automatic detection on the first migration of an app. Try --initial instead."
-                else:
-                    if hasattr(migrations[-1], "complete_apps") and \
-                       app in migrations[-1].complete_apps:
-                        last_models = migrations[-1].models
-                        last_orm = migrations[-1].orm
-                    else:
-                        print "You cannot use automatic detection, since the previous migration does not have this whole app frozen.\nEither make migrations using '--freeze %s' or set 'SOUTH_AUTO_FREEZE_APP = True' in your settings.py." % app
-            
-            # Right, did we manage to get the last set of models?
-            if last_models is None:
-                return
-            
-            # Good! Get new things.
-            new = dict([
-                (model_key(model), prep_for_freeze(model))
-                for model in models.get_models(app_models_module)
-            ])
-            # And filter other apps out of the old
-            old = dict([
-                (key, fields)
-                for key, fields in last_models.items()
-                if key.split(".", 1)[0] == app
-            ])
-            am, dm, af, df, cf = models_diff(old, new)
-            
-            if not (am or dm or af or df or cf):
-                print "Nothing seems to have changed."
-                return
-            
-            # Add items to the todo lists
-            added_models.update(am)
-            added_fields.update(af)
-            changed_fields.extend(cf)
-            
-            # Deleted models are from the past, and so we use instances instead.
-            for mkey in dm:
-                model = last_orm[mkey]
-                fields = last_models[mkey]
-                if "Meta" in fields:
-                    del fields['Meta']
-                deleted_models.append((model, fields, last_models))
-            
-            # For deleted fields, we tag the instance on the end too
-            for mkey, fname in df:
-                deleted_fields.append((
-                    mkey,
-                    fname,
-                    last_orm[mkey]._meta.get_field_by_name(fname)[0],
-                    last_models[mkey][fname],
-                    last_models,
-                ))
-        
-        
-        ### Added model ###
-        for mkey in added_models:
-            
-            print " + Added model '%s'" % (mkey,)
-            
-            model = model_unkey(mkey)
-            
-            # Add the model's dependencies to the stubs
-            stub_models.update(model_dependencies(model))
-            # Get the field definitions
-            fields = modelsparser.get_model_fields(model)
-            # Turn the (class, args, kwargs) format into a string
-            fields = triples_to_defs(app, model, fields)
-            # Make the code
-            forwards += CREATE_TABLE_SNIPPET % (
-                model._meta.object_name,
-                model._meta.db_table,
-                "\n            ".join(["('%s', %s)," % (fname, fdef) for fname, fdef in fields.items()]),
-                model._meta.app_label,
-                model._meta.object_name,
-            )
-            # And the backwards code
-            backwards += DELETE_TABLE_SNIPPET % (
-                model._meta.object_name, 
-                model._meta.db_table
-            )
-            # Now add M2M fields to be done
-            for field in model._meta.local_many_to_many:
-                added_fields.add((mkey, field.attname))
-        
-        
-        ### Added fields ###
-        for mkey, field_name in added_fields:
-            
-            print " + Added field '%s.%s'" % (mkey, field_name)
-            
-            # Get the model
-            model = model_unkey(mkey)
-            # Get the field
-            try:
-                field = model._meta.get_field(field_name)
-            except FieldDoesNotExist:
-                print "Model '%s' doesn't have a field '%s'" % (mkey, field_name)
-                return
-            
-            # ManyToMany fields need special attention.
-            if isinstance(field, models.ManyToManyField):
-                # Add a stub model for each side
-                stub_models[model] = None
-                stub_models[field.rel.to] = None
-                # And a field defn, that's actually a table creation
-                forwards += CREATE_M2MFIELD_SNIPPET % (
-                    model._meta.object_name,
-                    field.name,
-                    field.m2m_db_table(),
-                    field.m2m_column_name()[:-3], # strip off the '_id' at the end
-                    model._meta.object_name,
-                    field.m2m_reverse_name()[:-3], # strip off the '_id' at the end
-                    field.rel.to._meta.object_name
-                    )
-                backwards += DELETE_M2MFIELD_SNIPPET % (
-                    model._meta.object_name,
-                    field.name,
-                    field.m2m_db_table()
-                )
-                continue
-            
-            # Add any dependencies
-            stub_models.update(field_dependencies(field))
-            
-            # Work out the definition
-            triple = remove_useless_attributes(
-                modelsparser.get_model_fields(model)[field_name])
-            
-            field_definition = make_field_constructor(app, field, triple)
-            
-            forwards += CREATE_FIELD_SNIPPET % (
-                model._meta.object_name,
-                field.name,
-                model._meta.db_table,
-                field.name,
-                field_definition,
-            )
-            backwards += DELETE_FIELD_SNIPPET % (
-                model._meta.object_name,
-                field.name,
-                model._meta.db_table,
-                field.column,
-            )
-        
-        
-        ### Deleted fields ###
-        for mkey, field_name, field, triple, last_models in deleted_fields:
-            
-            print " - Deleted field '%s.%s'" % (mkey, field_name)
-            
-            # Get the model
-            model = model_unkey(mkey)
-            
-            # ManyToMany fields need special attention.
-            if isinstance(field, models.ManyToManyField):
-                # Add a stub model for each side, if they're not already there
-                # (if we just added old versions, we might override new ones)
-                if model not in stub_models:
-                    stub_models[model] = last_models
-                if field.rel.to not in last_models:
-                    stub_models[field.rel.to] = last_models
-                # And a field defn, that's actually a table deletion
-                forwards += DELETE_M2MFIELD_SNIPPET % (
-                    model._meta.object_name,
-                    field.name,
-                    field.m2m_db_table()
-                )
-                backwards += CREATE_M2MFIELD_SNIPPET % (
-                    model._meta.object_name,
-                    field.name,
-                    field.m2m_db_table(),
-                    field.m2m_column_name()[:-3], # strip off the '_id' at the end
-                    model._meta.object_name,
-                    field.m2m_reverse_name()[:-3], # strip off the '_id' at the end
-                    field.rel.to._meta.object_name
-                    )
-                continue
-            
-            # Add any dependencies
-            deps = field_dependencies(field, last_models)
-            deps.update(stub_models)
-            stub_models = deps
-            
-            # Work out the definition
-            triple = remove_useless_attributes(triple)
-            field_definition = make_field_constructor(app, field, triple)
-            
-            forwards += DELETE_FIELD_SNIPPET % (
-                model._meta.object_name,
-                field.name,
-                model._meta.db_table,
-                field.column,
-            )
-            backwards += CREATE_FIELD_SNIPPET % (
-                model._meta.object_name,
-                field.name,
-                model._meta.db_table,
-                field.name,
-                field_definition,
-            )
-        
-        
-        ### Deleted model ###
-        for model, fields, last_models in deleted_models:
-            
-            print " - Deleted model '%s.%s'" % (model._meta.app_label,model._meta.object_name)
-            
-            # Add the model's dependencies to the stubs
-            deps = model_dependencies(model, last_models)
-            deps.update(stub_models)
-            stub_models = deps
-            
-            # Turn the (class, args, kwargs) format into a string
-            fields = triples_to_defs(app, model, fields)
-            
-            # Make the code
-            forwards += DELETE_TABLE_SNIPPET % (
-                model._meta.object_name, 
-                model._meta.db_table
-            )
-            # And the backwards code
-            backwards += CREATE_TABLE_SNIPPET % (
-                model._meta.object_name,
-                model._meta.db_table,
-                "\n            ".join(["('%s', %s)," % (fname, fdef) for fname, fdef in fields.items()]),
-                model._meta.app_label,
-                model._meta.object_name,
-            )
-        
-        
-        ### Changed fields ###
-        for mkey, field_name, old_triple, new_triple in changed_fields:
-            print " ~ Changed field '%s.%s'." % (mkey, field_name)
-            
-            model = model_unkey(mkey)
-            old_def = triples_to_defs(app, model, {
-                field_name: old_triple,
-            })[field_name]
-            new_def = triples_to_defs(app, model, {
-                field_name: new_triple,
-            })[field_name]
-            
-            # We need to create the field, to see if it needs _id
-            field = model._meta.get_field_by_name(field_name)[0]
-            
-            forwards += CHANGE_FIELD_SNIPPET % (
-                model._meta.object_name,
-                field_name,
-                model._meta.db_table,
-                field.get_attname(),
-                new_def,
-            )
-            
-            backwards += CHANGE_FIELD_SNIPPET % (
-                model._meta.object_name,
-                field_name,
-                model._meta.db_table,
-                field.get_attname(),
-                old_def,
-            )
-        
-        
-        # Default values for forwards/backwards
-        if (not forwards) and (not backwards):
-            forwards = '"Write your forwards migration here"'
-            backwards = '"Write your backwards migration here"'
-        
-        all_models = {}
-        
-        # Fill out frozen model definitions
-        for model, last_models in frozen_models.items():
-            all_models[model_key(model)] = prep_for_freeze(model, last_models)
-        
-        # Fill out stub model definitions
-        for model, last_models in stub_models.items():
-            key = model_key(model)
-            if key in all_models:
-                continue # We'd rather use full models than stubs.
-            all_models[key] = prep_for_stub(model, last_models)
-        
-        # Do some model cleanup, and warnings
-        for modelname, model in all_models.items():
-            for fieldname, fielddef in model.items():
-                # Remove empty-after-cleaning Metas.
-                if fieldname == "Meta" and not fielddef:
-                    del model['Meta']
-                # Warn about undefined fields
-                elif fielddef is None:
-                    print "WARNING: Cannot get definition for '%s' on '%s'. Please edit the migration manually." % (
-                        fieldname,
-                        modelname,
-                    )
-                    model[fieldname] = FIELD_NEEDS_DEF_SNIPPET
-        
-        # Write the migration file
-        fp = open(os.path.join(migrations_dir, new_filename), "w")
-        fp.write(MIGRATION_SNIPPET % (
-            encoding or "", '.'.join(app_module_path), 
-            forwards, 
-            backwards, 
-            pprint_frozen_models(all_models),
-            complete_apps and "complete_apps = [%s]" % (", ".join(map(repr, complete_apps))) or ""
-        ))
-        fp.close()
-        print "Created %s." % new_filename
-
-
-### Cleaning functions for freezing
-
-def prep_for_freeze(model, last_models=None):
-    if last_models:
-        fields = last_models[model_key(model)]
-    else:
-        fields = modelsparser.get_model_fields(model, m2m=True)
-    # Remove useless attributes (like 'choices')
-    for name, field in fields.items():
-        fields[name] = remove_useless_attributes(field)
-    # See if there's a Meta
-    if last_models:
-        meta = last_models[model_key(model)].get("Meta", {})
-    else:
-        meta = modelsparser.get_model_meta(model)
-    if meta:
-        fields['Meta'] = remove_useless_meta(meta)
-    return fields
-
-
-def prep_for_stub(model, last_models=None):
-    if last_models:
-        fields = last_models[model_key(model)]
-    else:
-        fields = modelsparser.get_model_fields(model)
-    # Now, take only the PK (and a 'we're a stub' field) and freeze 'em
-    pk = model._meta.pk.name
-    fields = {
-        pk: remove_useless_attributes(fields[pk]),
-        "_stub": True,
-    }
-    # Meta is important too.
-    if last_models:
-        meta = last_models[model_key(model)].get("Meta", {})
-    else:
-        meta = modelsparser.get_model_meta(model)
-    if meta:
-        fields['Meta'] = remove_useless_meta(meta)
-    return fields
-
-
-### Module handling functions
-
-def model_key(model):
-    "For a given model, return 'appname.modelname'."
-    return ("%s.%s" % (model._meta.app_label, model._meta.object_name)).lower()
-
-def model_unkey(key):
-    "For 'appname.modelname', return the model."
-    app, modelname = key.split(".", 1)
-    model = models.get_model(app, modelname)
-    if not model:
-        print "Couldn't find model '%s' in app '%s'" % (modelname, app)
-        sys.exit(1)
-    return model
-
-### Dependency resolvers
-
-def model_dependencies(model, last_models=None):
-    """
-    Returns the models this one depends on being defined first; that is, the
-    targets of its OneToOneFields, ForeignKeys and ManyToManyFields.
-    """
-    depends = {}
-    for field in model._meta.fields + model._meta.many_to_many:
-        depends.update(field_dependencies(field, last_models))
-    return depends
-
-def field_dependencies(field, last_models=None):
-    depends = {}
-    if isinstance(field, (models.OneToOneField, models.ForeignKey, models.ManyToManyField)):
-        if last_models:
-            depends[field.rel.to] = last_models
-        else:
-            depends[field.rel.to] = None
-    return depends
-    
-
-
-### Prettyprinters
-
-def pprint_frozen_models(models):
-    return "{\n        %s\n    }" % ",\n        ".join([
-        "%r: %s" % (name, pprint_fields(fields))
-        for name, fields in models.items()
-    ])
-
-def pprint_fields(fields):
-    return "{\n            %s\n        }" % ",\n            ".join([
-        "%r: %r" % (name, defn)
-        for name, defn in sorted(fields.items())
-    ])
-
-
-### Output sanitisers
-
-
-USELESS_KEYWORDS = ["choices", "help_text"]
-USELESS_DB_KEYWORDS = ["related_name", "upload_to"] # Important for ORM, not for DB.
-
-def remove_useless_attributes(field, db=False):
-    "Removes useless (for database) attributes from the field's defn."
-    keywords = db and USELESS_DB_KEYWORDS or USELESS_KEYWORDS
-    if field:
-        for name in keywords:
-            if name in field[2]:
-                del field[2][name]
-    return field
-
-USELESS_META = ["verbose_name", "verbose_name_plural"]
-def remove_useless_meta(meta):
-    "Removes useless (for database) attributes from the table's meta."
-    if meta:
-        for name in USELESS_META:
-            if name in meta:
-                del meta[name]
-    return meta
-
-
-### Turns (class, args, kwargs) triples into function defs.
-
-def make_field_constructor(default_app, field, triple):
-    """
-    Given the default app, the field class,
-    and the defn triple (or string), make the definition string.
-    """
-    # It might be a defn string already...
-    if isinstance(triple, (str, unicode)):
-        return triple
-    # OK, do it the hard way
-    if hasattr(field, "rel") and hasattr(field.rel, "to") and field.rel.to:
-        rel_to = field.rel.to
-    else:
-        rel_to = None
-    args = [poss_ormise(default_app, rel_to, arg) for arg in triple[1]]
-    kwds = ["%s=%s" % (k, poss_ormise(default_app, rel_to, v)) for k,v in triple[2].items()]
-    return "%s(%s)" % (triple[0], ", ".join(args+kwds))
-
-QUOTES = ['"""', "'''", '"', "'"]
-
-def poss_ormise(default_app, rel_to, arg):
-    """
-    Given the default app, the related model (if any), and a Python
-    eval-able argument string, rewrite the argument to use the orm
-    accessor if it names the related model; otherwise return it unchanged.
-    """
-    orig_arg = arg
-    # If it's not a relative field, short-circuit out
-    if not rel_to:
-        return arg
-    # Get the name of the other model
-    rel_name = rel_to._meta.object_name
-    # Is it in a different app? If so, use proper addressing.
-    if rel_to._meta.app_label != default_app:
-        real_name = "orm['%s.%s']" % (rel_to._meta.app_label, rel_name)
-    else:
-        real_name = "orm.%s" % rel_name
-    # If it's surrounded by quotes, get rid of those
-    for quote_type in QUOTES:
-        l = len(quote_type)
-        if arg[:l] == quote_type and arg[-l:] == quote_type:
-            arg = arg[l:-l]
-            break
-    # Now see if we can replace it.
-    if arg.lower() == rel_name.lower():
-        return real_name
-    # Or perhaps it's app.model?
-    if arg.lower() == rel_to._meta.app_label.lower() + "." + rel_name.lower():
-        return real_name
-    return orig_arg
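
To make the rewriting rule above concrete, here is a minimal, self-contained sketch of the values poss_ormise returns. The blog app and its Author/Profile models are invented for the illustration; the stand-in classes only mimic the two _meta attributes the function actually reads, and the sketch assumes poss_ormise itself is importable from this module.

class _Meta(object):
    def __init__(self, app_label, object_name):
        self.app_label = app_label
        self.object_name = object_name

class Author(object):
    _meta = _Meta("blog", "Author")       # same app as the migration being written

class Profile(object):
    _meta = _Meta("accounts", "Profile")  # lives in a different app

# Same-app relation: quoted model names collapse to the attribute-style accessor.
assert poss_ormise("blog", Author, "'Author'") == "orm.Author"
assert poss_ormise("blog", Author, "'blog.Author'") == "orm.Author"

# Cross-app relation: the dictionary-style accessor is used instead.
assert poss_ormise("blog", Profile, "'accounts.Profile'") == "orm['accounts.Profile']"

# Arguments that don't name the related model pass through untouched.
assert poss_ormise("blog", Author, "42") == "42"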
-
-
-### Diffing functions between sets of models
-
-def models_diff(old, new):
-    """
-    Returns the difference between the old and new sets of models as a 5-tuple:
-    added_models, deleted_models, added_fields, deleted_fields, changed_fields
-    """
-    
-    added_models = set()
-    deleted_models = set()
-    ignored_models = set() # Stubs for backwards
-    added_fields = set()
-    deleted_fields = set()
-    changed_fields = []
-    
-    # See if anything's vanished
-    for key in old:
-        if key not in new:
-            if "_stub" not in old[key]:
-                deleted_models.add(key)
-            else:
-                ignored_models.add(key)
-    
-    # Or appeared
-    for key in new:
-        if key not in old:
-            added_models.add(key)
-    
-    # Now, for every model that's stayed the same, check its fields.
-    for key in old:
-        if key not in deleted_models and key not in ignored_models:
-            still_there = set()
-            # Find fields that have vanished.
-            for fieldname in old[key]:
-                if fieldname != "Meta" and fieldname not in new[key]:
-                    deleted_fields.add((key, fieldname))
-                else:
-                    still_there.add(fieldname)
-            # And ones that have appeared
-            for fieldname in new[key]:
-                if fieldname != "Meta" and fieldname not in old[key]:
-                    added_fields.add((key, fieldname))
-            # For the ones that exist in both models, see if they were changed
-            for fieldname in still_there:
-                if fieldname != "Meta" and \
-                   remove_useless_attributes(new[key][fieldname], True) != \
-                   remove_useless_attributes(old[key][fieldname], True):
-                    changed_fields.append((key, fieldname, old[key][fieldname], new[key][fieldname]))
-    
-    return added_models, deleted_models, added_fields, deleted_fields, changed_fields
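
As a worked illustration (app, model and field triples all invented), two frozen model dictionaries like the following produce one added field and one changed field; models and fields that are identical in both are not reported:

old = {
    "blog.post": {
        "id":    ("models.AutoField", [], {"primary_key": "True"}),
        "title": ("models.CharField", [], {"max_length": "100"}),
    },
}
new = {
    "blog.post": {
        "id":       ("models.AutoField", [], {"primary_key": "True"}),
        "title":    ("models.CharField", [], {"max_length": "255"}),
        "subtitle": ("models.CharField", [], {"max_length": "100", "blank": "True"}),
    },
}

added, deleted, added_fields, deleted_fields, changed = models_diff(old, new)
# added == deleted == deleted_fields == set()
# added_fields == set([("blog.post", "subtitle")])
# changed == [("blog.post", "title",
#              ("models.CharField", [], {"max_length": "100"}),
#              ("models.CharField", [], {"max_length": "255"}))]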
-
-
-### Turns (class, args, kwargs) triples into field definition strings, warning on missing defs
-
-
-def triples_to_defs(app, model, fields):
-    # Turn the (class, args, kwargs) format into a string
-    for field, triple in fields.items():
-        triple = remove_useless_attributes(triple)
-        if triple is None:
-            print "WARNING: Cannot get definition for '%s' on '%s'. Please edit the migration manually." % (
-                field,
-                model_key(model),
-            )
-            fields[field] = FIELD_NEEDS_DEF_SNIPPET
-        else:
-            fields[field] = make_field_constructor(
-                app,
-                model._meta.get_field_by_name(field)[0],
-                triple,
-            )
-    return fields
-
-
-### Various code snippets we need to use
-
-MIGRATION_SNIPPET = """%s
-from south.db import db
-from django.db import models
-from %s.models import *
-
-class Migration:
-    
-    def forwards(self, orm):
-        %s
-    
-    
-    def backwards(self, orm):
-        %s
-    
-    
-    models = %s
-    
-    %s
-"""
-CREATE_TABLE_SNIPPET = '''
-        # Adding model '%s'
-        db.create_table(%r, (
-            %s
-        ))
-        db.send_create_signal(%r, [%r])
-        '''
-DELETE_TABLE_SNIPPET = '''
-        # Deleting model '%s'
-        db.delete_table(%r)
-        '''
-CREATE_FIELD_SNIPPET = '''
-        # Adding field '%s.%s'
-        db.add_column(%r, %r, %s)
-        '''
-DELETE_FIELD_SNIPPET = '''
-        # Deleting field '%s.%s'
-        db.delete_column(%r, %r)
-        '''
-CHANGE_FIELD_SNIPPET = '''
-        # Changing field '%s.%s'
-        db.alter_column(%r, %r, %s)
-        '''
-CREATE_M2MFIELD_SNIPPET = '''
-        # Adding ManyToManyField '%s.%s'
-        db.create_table('%s', (
-            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
-            ('%s', models.ForeignKey(%s, null=False)),
-            ('%s', models.ForeignKey(%s, null=False))
-        ))
-        '''
-DELETE_M2MFIELD_SNIPPET = '''
-        # Dropping ManyToManyField '%s.%s'
-        db.delete_table('%s')
-        '''
-FIELD_NEEDS_DEF_SNIPPET = "<< PUT FIELD DEFINITION HERE >>"
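
Filled in, the templates above come out as a migration module roughly like the following. The blog app, its Post model and the subtitle field are invented for the illustration, and the frozen field triples are abbreviated; the real output depends entirely on the models being frozen.

# Illustration only: what startmigration might write for a hypothetical blog app.
from south.db import db
from django.db import models
from blog.models import *

class Migration:

    def forwards(self, orm):

        # Adding field 'Post.subtitle'
        db.add_column('blog_post', 'subtitle', models.CharField(max_length=100, blank=True))

    def backwards(self, orm):

        # Deleting field 'Post.subtitle'
        db.delete_column('blog_post', 'subtitle')

    models = {
        'blog.post': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'subtitle': ('models.CharField', [], {'blank': 'True', 'max_length': '100'}),
            'title': ('models.CharField', [], {'max_length': '100'})
        }
    }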

management/commands/syncdb.py

-from django.core.management.base import NoArgsCommand, BaseCommand 
-from django.core.management.color import no_style
-from django.utils.datastructures import SortedDict
-from optparse import make_option
-from south import migration
-from django.core.management.commands import syncdb
-from django.conf import settings
-from django.db import models
-from django.db.models.loading import cache
-from django.core import management
-import sys
-
-def get_app_name(app):
-    return '.'.join( app.__name__.split('.')[0:-1] )
-
-class Command(NoArgsCommand):
-    option_list = NoArgsCommand.option_list + (
-        make_option('--noinput', action='store_false', dest='interactive', default=True,
-            help='Tells Django to NOT prompt the user for input of any kind.'),
-        make_option('--migrate', action='store_true', dest='migrate', default=False,
-            help='Tells South to also perform migrations after the sync. This is the default during testing and other internal calls.'),
-    )
-    if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
-        option_list += (
-            make_option('--verbosity', action='store', dest='verbosity', default='1',
-            type='choice', choices=['0', '1', '2'],
-            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
-        )
-    help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created, except those which use migrations."
-
-    def handle_noargs(self, **options):
-        # Work out what uses migrations and so doesn't need syncing
-        apps_needing_sync = []
-        apps_migrated = []
-        for app in models.get_apps():
-            app_name = get_app_name(app)
-            migrations = migration.get_app(app)
-            if migrations is None:
-                apps_needing_sync.append(app_name)
-            else:
-                # This is a migrated app, leave it
-                apps_migrated.append(app_name)
-        verbosity = int(options.get('verbosity', 0))
-        # Run syncdb on only the ones needed
-        if verbosity > 0:
-            print "Syncing..."
-        old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync
-        old_app_store, cache.app_store = cache.app_store, SortedDict([
-            (k, v) for (k, v) in cache.app_store.items()
-            if get_app_name(k) in apps_needing_sync
-        ])
-        syncdb.Command().execute(**options)
-        settings.INSTALLED_APPS = old_installed
-        cache.app_store = old_app_store
-        # Migrate if needed
-        if options.get('migrate', True):
-            if verbosity > 0:
-                print "Migrating..."
-            management.call_command('migrate', **options)
-        # Be obvious about what we did
-        if verbosity > 0:
-            print "\nSynced:\n > %s" % "\n > ".join(apps_needing_sync)
-        
-        if options.get('migrate', True):
-            if verbosity > 0:
-                print "\nMigrated:\n - %s" % "\n - ".join(apps_migrated)
-        else:
-            if verbosity > 0:
-                print "\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated)
-                print "(use ./manage.py migrate to migrate these)"

management/commands/test.py

-from django.core import management
-from django.core.management.commands import test
-from django.core.management.commands import syncdb
-from django.conf import settings
-
-class Command(test.Command):
-    
-    def handle(self, *args, **kwargs):
-        if not hasattr(settings, "SOUTH_TESTS_MIGRATE") or not settings.SOUTH_TESTS_MIGRATE:
-            # point at the core syncdb command when creating tests
-            # tests should always be up to date with the most recent model structure
-            management.get_commands()
-            management._commands['syncdb'] = 'django.core'
-        super(Command, self).handle(*args, **kwargs)
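
Put another way, the test database is built with Django's stock syncdb unless the project opts in. A minimal sketch of the opt-in, assuming the standard settings module:

# settings.py (sketch): build test databases by running the South migrations
# instead of the plain syncdb. Leaving this out, or setting it to False,
# keeps the behaviour shown above.
SOUTH_TESTS_MIGRATE = True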

migration.py

-
-import datetime
-import os
-import sys
-import traceback
-import inspect
-from django.conf import settings
-from django.db import models
-from django.core.exceptions import ImproperlyConfigured
-from django.core.management import call_command
-from models import MigrationHistory
-from south.db import db
-from south.orm import FakeORM
-
-
-def get_app(app):
-    """
-    Returns the migrations module for the given app model name/module, or None
-    if it does not use migrations.
-    """
-    if isinstance(app, (str, unicode)):
-        # If it's a string, use the models module
-        app = models.get_app(app)
-    mod = __import__(app.__name__[:-7], {}, {}, ['migrations'])
-    if hasattr(mod, 'migrations'):
-        return getattr(mod, 'migrations')
-
-
-def get_migrated_apps():
-    """
-    Returns all apps with migrations.
-    """
-    for mapp in models.get_apps():
-        app = get_app(mapp)
-        if app:
-            yield app
-
-
-def get_app_name(app):
-    """
-    Returns the _internal_ app name for the given app module.
-    e.g. for <module django.contrib.auth.models> this returns 'auth'.
-    """
-    return app.__name__.split('.')[-2]
-
-
-def get_app_fullname(app):
-    """
-    Returns the full python name of an app - e.g. django.contrib.auth
-    """
-    return app.__name__[:-11]
-
-
-def short_from_long(app_name):
-    return app_name.split(".")[-1]
-
-
-def get_migration_names(app):
-    """
-    Returns a list of migration file names for the given app.
-    """
-    return sorted([
-        filename[:-3]
-        for filename in os.listdir(os.path.dirname(app.__file__))
-        if filename.endswith(".py") and filename != "__init__.py" and not filename.startswith(".")
-    ])
-
-
-def get_migration_classes(app):
-    """
-    Returns a list of migration classes (one for each migration) for the app.
-    """
-    for name in get_migration_names(app):
-        yield get_migration(app, name)
-
-
-def get_migration(app, name):
-    """
-    Returns the migration class implied by 'name'.
-    """
-    try:
-        module = __import__(app.__name__ + "." + name, '', '', ['Migration'])
-        migclass = module.Migration
-        migclass.orm = FakeORM(migclass, get_app_name(app))
-        module._ = lambda x: x  # Fake i18n
-        return migclass
-    except ImportError:
-        print " ! Migration %s:%s probably doesn't exist." % (get_app_name(app), name)
-        print " - Traceback:"
-        raise
-    except Exception, e:
-        print "While loading migration '%s.%s':" % (get_app_name(app), name)
-        raise
-
-
-def all_migrations():
-    return dict([
-        (app, dict([(name, get_migration(app, name)) for name in get_migration_names(app)]))
-        for app in get_migrated_apps()
-    ])
-
-
-def dependency_tree():
-    tree = all_migrations()
-    
-    # Annotate tree with 'backwards edges'
-    for app, classes in tree.items():
-        for name, cls in classes.items():
-            cls.needs = []
-            if not hasattr(cls, "needed_by"):
-                cls.needed_by = []
-            if hasattr(cls, "depends_on"):
-                for dapp, dname in cls.depends_on:
-                    dapp = get_app(dapp)
-                    if dapp not in tree:
-                        print "Migration %s in app %s depends on unmigrated app %s." % (
-                            name,
-                            get_app_name(app),
-                            dapp,
-                        )
-                        sys.exit(1)
-                    if dname not in tree[dapp]:
-                        print "Migration %s in app %s depends on nonexistent migration %s in app %s." % (
-                            name,
-                            get_app_name(app),
-                            dname,
-                            get_app_name(dapp),
-                        )
-                        sys.exit(1)
-                    cls.needs.append((dapp, dname))
-                    if not hasattr(tree[dapp][dname], "needed_by"):
-                        tree[dapp][dname].needed_by = []
-                    tree[dapp][dname].needed_by.append((app, name))
-    
-    # Sanity check whole tree
-    for app, classes in tree.items():
-        for name, cls in classes.items():
-            cls.dependencies = dependencies(tree, app, name)
-    
-    return tree
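
For reference, the depends_on attribute that dependency_tree() reads is declared on a migration class roughly as below (the accounts app and the migration name are invented):

class Migration:

    # Each entry is (app label, migration name). dependency_tree() turns these
    # into forward ('needs') and backward ('needed_by') edges between migrations.
    depends_on = (
        ("accounts", "0001_initial"),
    )

    def forwards(self, orm):
        pass

    def backwards(self, orm):
        pass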
-
-
-def nice_trace(trace):
-    return " -> ".join([str((get_app_name(a), n)) for a, n in trace])
-
-
-def dependencies(tree, app, name, trace=[]):
-    # Copy trace to stop pass-by-ref problems
-    trace = trace[:]
-    # Sanity check
-    for papp, pname in trace:
-        if app == papp:
-            if pname == name:
-                print "Found circular dependency: %s" % nice_trace(trace + [(app,name)])
-                sys.exit(1)
-            else:
-                # See if they depend in the same app the wrong way
-                migrations = get_migration_names(app)
-                if migrations.index(name) > migrations.index(pname):
-                    print "Found a lower migration (%s) depending on a higher migration (%s) in the same app (%s)." % (pname, name, get_app_name(app))
-                    print "Path: %s" % nice_trace(trace + [(app,name)])
-                    sys.exit(1)
-    # Get the dependencies of a migration
-    deps = []
-    migration = tree[app][name]
-    for dapp, dname in migration.needs:
-        deps.extend(
-            dependencies(tree, dapp, dname, trace+[(app,name)])
-        )
-    return deps
-
-
-def remove_duplicates(l):
-    m = []
-    for x in l:
-        if x not in m:
-            m.append(x)
-    return m
-
-
-def needed_before_forwards(tree, app, name, sameapp=True):
-    """
-    Returns a list of migrations that must be applied before (app, name),
-    in the order they should be applied.
-    Used to make sure a migration can be applied (and to help apply up to it).
-    """
-    app_migrations = get_migration_names(app)
-    needed = []
-    if sameapp:
-        for aname in app_migrations[:app_migrations.index(name)]:
<