jdunck committed 0306693 (merge)

Merging upstream default to fuzz testing head.


Files changed (19)

docs/ormfreezing.rst

 relationships, but if you want to add other models in, simply pass ``--freeze appname``
 to the ``./manage.py datamigration`` command.
 
+Also note that the ``backwards()`` method gets the ORM as frozen by the previous
+migration, except for migrations that define ``symmetrical = True`` (new in South 1.0).
+
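As an illustration of the behaviour described above, a data migration that opts
into the symmetrical ORM might look roughly like this (a minimal sketch; the app,
model and field names are hypothetical, and the frozen ``models`` dict is omitted):

    from south.v2 import DataMigration

    class Migration(DataMigration):

        # New in South 1.0: reuse this migration's own frozen ORM for
        # backwards() instead of the ORM frozen by the previous migration.
        symmetrical = True

        def forwards(self, orm):
            for profile in orm['someapp.UserProfile'].objects.all():
                profile.slug = profile.name.lower()
                profile.save()

        def backwards(self, orm):
            # Same frozen ORM as forwards(), because symmetrical is True.
            orm['someapp.UserProfile'].objects.update(slug='')

        # Frozen model definitions omitted in this sketch.
        models = {}
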
 Frozen Meta Attributes
 ----------------------
 

south/creator/freezer.py

 import sys
 
 from django.db import models
+from django.db.models.base import ModelBase, Model
 from django.contrib.contenttypes.generic import GenericRelation
 
 from south.orm import FakeORM
-from south.utils import auto_model
+from south.utils import get_attribute, auto_through
 from south import modelsinspector
 
 def freeze_apps(apps):
     checked_models = checked_models or set()
     # Get deps for each field
     for field in model._meta.fields + model._meta.many_to_many:
-        depends.update(field_dependencies(field))
+        depends.update(field_dependencies(field, checked_models))
     # Add in any non-abstract bases
     for base in model.__bases__:
-        if issubclass(base, models.Model) and (base is not models.Model) and not base._meta.abstract:
+        if issubclass(base, models.Model) and hasattr(base, '_meta') and not base._meta.abstract:
             depends.add(base)
     # Now recurse
     new_to_check = depends - checked_models
 def field_dependencies(field, checked_models=None):
     checked_models = checked_models or set()
     depends = set()
-    if isinstance(field, (models.OneToOneField, models.ForeignKey, models.ManyToManyField, GenericRelation)):
-        if field.rel.to in checked_models:
-            return depends
-        checked_models.add(field.rel.to)
-        depends.add(field.rel.to)
-        depends.update(field_dependencies(field.rel.to._meta.pk, checked_models))
-        # Also include M2M throughs
-        if isinstance(field, models.ManyToManyField):
-            if field.rel.through:
-                if hasattr(field.rel, "through_model"): # 1.1 and below
-                    depends.add(field.rel.through_model)
-                else:
-                    # Make sure it's not an automatic one
-                    if not auto_model(field.rel.through):
-                        depends.add(field.rel.through) # 1.2 and up
+    arg_defs, kwarg_defs = modelsinspector.matching_details(field)
+    for attrname, options in arg_defs + kwarg_defs.values():
+        if options.get("ignore_if_auto_through", False) and auto_through(field):
+            continue
+        if options.get("is_value", False):
+            value = attrname
+        elif attrname == 'rel.through' and hasattr(getattr(field, 'rel', None), 'through_model'):
+            # Hack for django 1.1 and below, where the through model is stored
+            # in rel.through_model while rel.through stores only the model name.
+            value = field.rel.through_model
+        else:
+            try:
+                value = get_attribute(field, attrname)
+            except AttributeError:
+                if options.get("ignore_missing", False):
+                    continue
+                raise
+        if isinstance(value, Model):
+            value = value.__class__
+        if not isinstance(value, ModelBase):
+            continue
+        if getattr(value._meta, "proxy", False):
+            value = value._meta.proxy_for_model
+        if value in checked_models:
+            continue
+        checked_models.add(value)
+        depends.add(value)
+        depends.update(model_dependencies(value, checked_models))
+
     return depends
 
 ### Prettyprinters
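
The rewritten field_dependencies() above is driven by the introspection rules
rather than a hard-coded list of relation field types, so any field whose rule
exposes a model-valued attribute (including custom fields) now contributes a
dependency. The new south/tests/freezer.py added below exercises exactly this;
a condensed version, assuming the fakeapp test models are importable:

    from south.creator.freezer import model_dependencies
    from south.tests.fakeapp import models

    # SubModel inherits from BaseModel, has an M2M to Other1, and uses a
    # CustomField whose introspection rule exposes its model argument.
    deps = set(model_dependencies(models.SubModel))
    assert deps == set([models.BaseModel, models.Other1, models.Other2])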

south/db/generic.py

     delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
     create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
     delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
+    add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
     backend_name = None
     default_schema_name = "public"
 
         except TypeError:
             return field.db_type()
         
+    def _alter_add_column_mods(self, field, name, params, sqls):
+        """
+        Subcommand of alter_column that modifies column definitions beyond
+        the type string -- e.g. adding constraints where they cannot be specified
+        as part of the type (overrideable)
+        """
+        pass
+
     def _alter_set_defaults(self, field, name, params, sqls): 
         "Subcommand of alter_column that sets default values (overrideable)"
         # Next, set any default
             field.column = name
 
         if not ignore_constraints:
-            # Drop all check constraints. TODO: Add the right ones back.
+            # Drop all check constraints. Note that constraints will be added back
+            # with self.alter_string_set_type and self.alter_string_drop_null.
             if self.has_check_constraints:
                 check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
                 for constraint in check_constraints:
         if params["type"] is not None:
             sqls.append((self.alter_string_set_type % params, []))
         
+        # Add any field- and backend- specific modifications
+        self._alter_add_column_mods(field, name, params, sqls)
         # Next, nullity
         if field.null:
             sqls.append((self.alter_string_set_null % params, []))
                 field_output.append('UNIQUE')
 
             tablespace = field.db_tablespace or tablespace
-            if tablespace and self._get_connection().features.supports_tablespaces and field.unique:
+            if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique:
                 # We must specify the index tablespace inline, because we
                 # won't be generating a CREATE INDEX statement for this field.
                 field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))
         MockModel._meta.model = MockModel
         return MockModel
 
+    def _db_positive_type_for_alter_column(self, field):
+        """
+        A helper for subclasses overriding _db_type_for_alter_column:
+        Remove the check constraint from the type string for PositiveInteger
+        and PositiveSmallInteger fields.
+        @param field: The field to generate type for
+        """
+        super_result = super(type(self), self)._db_type_for_alter_column(field)
+        if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
+            return super_result.split(" ", 1)[0]
+        return super_result
+        
+    def _alter_add_positive_check(self, field, name, params, sqls):
+        """
+        A helper for subclasses overriding _alter_add_column_mods:
+        Add a check constraint verifying positivity to PositiveInteger and
+        PositiveSmallInteger fields.
+        """
+        super(type(self), self)._alter_add_column_mods(field, name, params, sqls)
+        if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
+            uniq_hash = abs(hash(tuple(params.values()))) 
+            d = dict(
+                     constraint = "CK_%s_PSTV_%s" % (name, hex(uniq_hash)[2:]),
+                     check = "%s >= 0" % self.quote_name(name))
+            sqls.append((self.add_check_constraint_fragment % d, []))
+    
+
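For PositiveIntegerField and PositiveSmallIntegerField these two helpers work as
a pair: _db_positive_type_for_alter_column() strips the inline CHECK from the
column type, and _alter_add_positive_check() re-adds an explicit named
constraint. Roughly, the fragment appended for a hypothetical column "age" would
be built like this (a sketch; the hash suffix varies with the alter parameters):

    # Hypothetical params, as assembled by alter_column() for column "age".
    params = {'table_name': '"people"', 'column': '"age"', 'type': 'integer'}
    uniq_hash = abs(hash(tuple(params.values())))
    fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)" % {
        'constraint': "CK_age_PSTV_%s" % hex(uniq_hash)[2:],
        'check': '"age" >= 0',
    }
    # e.g. 'ADD CONSTRAINT CK_age_PSTV_1a2b3c CHECK ("age" >= 0)'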
 
 # Single-level flattening of lists
 def flatten(ls):

south/db/mysql.py

         # See if there is a foreign key on this column
         result = 0
         for kind, cname in self.lookup_constraint(db_name, table_name, name):
-            if kind == 'FOREIGN_KEY':
+            if kind == 'FOREIGN KEY':
                 result += 1
                 fkey_name = cname
         if result:

south/db/oracle.py

 import os.path
 import sys
 import re
+import warnings
 import cx_Oracle
 
 
 from django.db import connection, models
 from django.db.backends.util import truncate_name
 from django.core.management.color import no_style
-from django.db.backends.oracle.base import get_sequence_name
 from django.db.models.fields import NOT_PROVIDED
 from django.db.utils import DatabaseError
+
+# In Django revision r16016 the get_sequence_name function was turned into a
+# method of the DatabaseOperations class. To keep the code backwards-compatible
+# we need to handle both situations.
+try:
+    from django.db.backends.oracle.base import get_sequence_name\
+        as original_get_sequence_name
+except ImportError:
+    original_get_sequence_name = None
+
 from south.db import generic
 
-print >> sys.stderr, " ! WARNING: South's Oracle support is still alpha."
-print >> sys.stderr, " !          Be wary of possible bugs."
+warnings.warn("! WARNING: South's Oracle support is still alpha. "
+              "Be wary of possible bugs.")
 
 class DatabaseOperations(generic.DatabaseOperations):    
     """
     alter_string_set_default =  'ALTER TABLE %(table_name)s MODIFY %(column)s DEFAULT %(default)s;'
     add_column_string =         'ALTER TABLE %s ADD %s;'
     delete_column_string =      'ALTER TABLE %s DROP COLUMN %s;'
+    add_constraint_string =     'ALTER TABLE %(table_name)s ADD CONSTRAINT %(constraint)s %(clause)s'
 
     allows_combined_alters = False
     
         'R': 'FOREIGN KEY'
     }
 
+    def get_sequence_name(self, table_name):
+        if original_get_sequence_name is None:
+            return self._get_connection().ops._get_sequence_name(table_name)
+        else:
+            return original_get_sequence_name(table_name)
+
     def adj_column_sql(self, col):
-        col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT [0|1])', 
-                     lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean field only
+        col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)', 
+                     lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean/integer field only
         col = re.sub('(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)',
                      lambda mo: '%s %s %s'%(mo.group('default'),mo.group('not_null'),mo.group('misc') or ''), col) #fix order of NULL/NOT NULL and DEFAULT
         return col
         EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
     END IF;
 END;
-/""" % {'sq_name': get_sequence_name(table_name)}
+/""" % {'sq_name': self.get_sequence_name(table_name)}
         self.execute(sequence_sql)
 
     @generic.invalidate_table_constraints
         qn_col = self.quote_name(name)
 
         # First, change the type
+        # This will actually also add any CHECK constraints needed,
+        # since e.g. 'type' for a BooleanField is 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))'
         params = {
             'table_name':qn,
             'column': qn_col,
         }
         if field.null:
             params['nullity'] = 'NULL'
-        sqls = [self.alter_string_set_type % params]
 
         if not field.null and field.has_default():
             params['default'] = field.get_default()
 
-        sqls.append(self.alter_string_set_default % params)
+        sql_templates = [
+            (self.alter_string_set_type, params),
+            (self.alter_string_set_default, params.copy()),
+        ]
 
-        #UNIQUE constraint
+        # UNIQUE constraint
         unique_constraint = list(self._constraints_affecting_columns(table_name, [name], 'UNIQUE'))
         if field.unique and not unique_constraint:
             self.create_unique(table_name, [name])
         elif not field.unique and unique_constraint:
             self.delete_unique(table_name, [name])
 
-        #CHECK constraint is not handled
+        # drop CHECK constraints. Make sure this is executed before the ALTER TABLE statements
+        # generated above, since those statements recreate the constraints we delete here.
+        check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
+        for constraint in check_constraints:
+            self.execute(self.delete_check_sql % {
+                'table': self.quote_name(table_name),
+                'constraint': self.quote_name(constraint),
+            })
 
-        for sql in sqls:
+        for sql_template, params in sql_templates:
             try:
-                self.execute(sql)
+                self.execute(sql_template % params)
             except DatabaseError, exc:
-                # Oracle complains if a column is already NULL/NOT NULL 
-                if str(exc).find('ORA-01442') == -1 and str(exc).find('ORA-01451') == -1:
+                description = str(exc)
+                # Oracle complains if a column is already NULL/NOT NULL
+                if 'ORA-01442' in description or 'ORA-01451' in description:
+                    # so we just drop NULL/NOT NULL part from target sql and retry
+                    params['nullity'] = ''
+                    sql = sql_template % params
+                    self.execute(sql)
+                else:
                     raise
 
     @generic.copy_column_constraints

south/db/postgresql_psycopg2.py

         "Rename an index individually"
         generic.DatabaseOperations.rename_table(self, old_index_name, index_name)
 
-    def _db_type_for_alter_column(self, field):
-        """
-        Returns a field's type suitable for ALTER COLUMN.
-        Strips CHECKs from PositiveSmallIntegerField) and PositiveIntegerField
-        @param field: The field to generate type for
-        """
-        super_result = super(DatabaseOperations, self)._db_type_for_alter_column(field)
-        if isinstance(field, models.PositiveSmallIntegerField) or isinstance(field, models.PositiveIntegerField):
-            return super_result.split(" ")[0]
-        return super_result
+    _db_type_for_alter_column = generic.alias("_db_positive_type_for_alter_column")
+    _alter_add_column_mods = generic.alias("_alter_add_positive_check")

south/db/sql_server/pyodbc.py

     drop_constraint_string = 'ALTER TABLE %(table_name)s DROP CONSTRAINT %(constraint_name)s'
     delete_column_string = 'ALTER TABLE %s DROP COLUMN %s'
 
-    create_check_constraint_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
+    #create_check_constraint_sql = "ALTER TABLE %(table)s " + \
+    #                              generic.DatabaseOperations.add_check_constraint_fragment 
     create_foreign_key_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s " + \
                              "FOREIGN KEY (%(column)s) REFERENCES %(target)s"
     create_unique_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s UNIQUE (%(columns)s)"
         ret_val = super(DatabaseOperations, self).alter_column(table_name, name, field, explicit_name, ignore_constraints=True)
         
         if not ignore_constraints:
+            unique_field_handled = False
             for cname, (ctype,args) in constraints.items():
                 params = dict(table = table,
                               constraint = qn(cname))
                 if ctype=='UNIQUE':
-                    #TODO: This preserves UNIQUE constraints, but does not yet create them when necessary
+                    if len(args)==1:
+                        unique_field_handled = True  # a single-column UNIQUE constraint already covers this field
                     if len(args)>1 or field.unique:
                         params['columns'] = ", ".join(map(qn,args))
                         sql = self.create_unique_sql % params
+                    else:
+                        continue
                 elif ctype=='PRIMARY KEY':
                     params['columns'] = ", ".join(map(qn,args))
                     sql = self.create_primary_key_string % params
                 else:
                     raise NotImplementedError("Don't know how to handle constraints of type "+ type)                    
                 self.execute(sql, [])
+            # Create unique constraint if necessary
+            if field.unique and not unique_field_handled:
+                self.create_unique(table_name, (name,))
             # Create foreign key if necessary
             if field.rel and self.supports_foreign_keys:
                 self.execute(
         params = (self.quote_name(old_table_name), self.quote_name(table_name))
         self.execute('EXEC sp_rename %s, %s' % params)
 
-    # Copied from South's psycopg2 backend
-    def _db_type_for_alter_column(self, field):
-        """
-        Returns a field's type suitable for ALTER COLUMN.
-        Strips CHECKs from PositiveSmallIntegerField) and PositiveIntegerField
-        @param field: The field to generate type for
-        """
-        super_result = super(DatabaseOperations, self)._db_type_for_alter_column(field)
-        if isinstance(field, models.PositiveSmallIntegerField) or isinstance(field, models.PositiveIntegerField):
-            return super_result.split(" ")[0]
-        return super_result
+    _db_type_for_alter_column = generic.alias("_db_positive_type_for_alter_column")
+    _alter_add_column_mods = generic.alias("_alter_add_positive_check")
 
     @invalidate_table_constraints
     def delete_foreign_key(self, table_name, column):

south/db/sqlite3.py

             field.column: self._column_sql_for_create(table_name, name, field, False),
         })
 
+    def _get_full_table_description(self, connection, cursor, table_name):
+        cursor.execute('PRAGMA table_info(%s)' % connection.ops.quote_name(table_name))
+        # cid, name, type, notnull, dflt_value, pk
+        return [{'name': field[1],
+                 'type': field[2],
+                 'null_ok': not field[3],
+                 'dflt_value': field[4],
+                 'pk': field[5]     # undocumented
+                 } for field in cursor.fetchall()]
+
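For reference, PRAGMA table_info() returns one row per column as
(cid, name, type, notnull, dflt_value, pk), which the helper above repackages
into dicts. A standalone illustration with plain sqlite3 (outside Django; the
table and columns are made up):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE demo ("
                 "id integer PRIMARY KEY, "
                 "name varchar(20) NOT NULL DEFAULT 'x')")
    cursor = conn.execute("PRAGMA table_info(demo)")
    for cid, name, col_type, notnull, dflt_value, pk in cursor.fetchall():
        print {'name': name, 'type': col_type, 'null_ok': not notnull,
               'dflt_value': dflt_value, 'pk': pk}
    # {'name': u'id', 'type': u'integer', 'null_ok': True, 'dflt_value': None, 'pk': 1}
    # {'name': u'name', 'type': u'varchar(20)', 'null_ok': False, 'dflt_value': u"'x'", 'pk': 0}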
     @generic.invalidate_table_constraints
     def _remake_table(self, table_name, added={}, renames={}, deleted=[], altered={}, primary_key_override=None, uniques_deleted=[]):
         """
         indexes = self._get_connection().introspection.get_indexes(cursor, table_name)
         multi_indexes = self._get_multi_indexes(table_name)
         # Work out new column defs.
-        for column_info in self._get_connection().introspection.get_table_description(cursor, table_name):
-            name = column_info[0]
+        for column_info in self._get_full_table_description(self._get_connection(), cursor, table_name):
+            name = column_info['name']
             if name in deleted:
                 continue
             # Get the type, ignoring PRIMARY KEY (we need to be consistent)
-            type = column_info[1].replace("PRIMARY KEY", "")
-            # Add on unique or primary key if needed.
-            if indexes[name]['unique'] and name not in uniques_deleted:
-                type += " UNIQUE"
+            type = column_info['type'].replace("PRIMARY KEY", "")
+            # Add on primary key, not null or unique if needed.
             if (primary_key_override and primary_key_override == name) or \
                (not primary_key_override and indexes[name]['primary_key']):
                 type += " PRIMARY KEY"
+            elif not column_info['null_ok']:
+                type += " NOT NULL"
+            if indexes[name]['unique'] and name not in uniques_deleted:
+                type += " UNIQUE"
+
+            if column_info['dflt_value'] is not None:
+                type += " DEFAULT " + column_info['dflt_value']
+
             # Deal with a rename
             if name in renames:
                 name = renames[name]
         #    sql += " UNIQUE"
         return sql
     
-    def alter_column(self, table_name, name, field, explicit_name=True):
+    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
         """
-        Changes a column's SQL definition
+        Changes a column's SQL definition.
+
+        Note that this sqlite3 implementation ignores the ignore_constraints argument.
+        The argument is accepted for API compatibility with the generic
+        DatabaseOperations.alter_column() method.
         """
         # Remake the table correctly
         self._remake_table(table_name, altered={

south/introspection_plugins/geodjango.py

                     "srid": ["srid", {"default": 4326}],
                     "spatial_index": ["spatial_index", {"default": True}],
                     "dim": ["dim", {"default": 2}],
+                    "geography": ["geography", {"default": False}],
                 },
             ),
         ]

south/management/commands/convert_to_south.py

         model_list = models.get_models(app_module)
         if not model_list:
             print "This application has no models; this command is for applications that already have models syncdb'd."
-            print "Make some models, and then use ./manage.py startmigration %s --initial instead." % app
+            print "Make some models, and then use ./manage.py schemamigration %s --initial instead." % app
             return
         
         # Ask South if it thinks it's already got migrations

south/management/commands/datamigration.py

     models = %(frozen_models)s
 
     %(complete_apps)s
+    symmetrical = True
 """

south/migration/__init__.py

         # Finally, fire off the post-migrate signal
         if success:
             post_migrate.send(None, app=app_label)
-    elif verbosity:
-        # Say there's nothing.
-        print '- Nothing to migrate.'
+    else:
+        if verbosity:
+            # Say there's nothing.
+            print '- Nothing to migrate.'
         # If we have initial data enabled, and we're at the most recent
         # migration, do initial data.
         # Note: We use a fake Forwards() migrator here. It's never used really.

south/migration/base.py

             return False
 
     def prev_orm(self):
+        if getattr(self.migration_class(), 'symmetrical', False):
+            return self.orm()
         previous = self.previous()
         if previous is None:
             # First migration? The 'previous ORM' is empty.

south/modelsinspector.py

     
     # Go through all bases (that are themselves models, but not Model)
     for base in model.__bases__:
-        if base != models.Model and issubclass(base, models.Model):
+        if hasattr(base, '_meta') and issubclass(base, models.Model):
             if not base._meta.abstract:
                 # Looks like we need their fields, Ma.
                 inherited_fields.update(get_model_fields(base))
     # This is called _ormbases as the _bases variable was previously used
     # for a list of full class paths to bases, so we can't conflict.
     for base in model.__bases__:
-        if base != models.Model and issubclass(base, models.Model):
+        if hasattr(base, '_meta') and issubclass(base, models.Model):
             if not base._meta.abstract:
                 # OK, that matches our terms.
                 if "_ormbases" not in meta_def:

south/tests/__init__.py

     from south.tests.autodetection import *
     from south.tests.logger import *
     from south.tests.inspector import *
+    from south.tests.freezer import *

south/tests/db.py

         # Make sure we can't do the same query on an empty table
         try:
             cursor.execute("SELECT * FROM nottheretest1")
-            self.fail("Non-existent table could be selected!")
         except:
             pass
+        else:
+            self.fail("Non-existent table could be selected!")
     
     def test_delete(self):
         """
         Test deletion of tables.
         """
+        cursor = connection.cursor()
         db.create_table("test_deltable", [('email_confirmed', models.BooleanField(default=False))])
         db.delete_table("test_deltable")
         # Make sure it went
         try:
-            cursor.execute("SELECT * FROM test1")
-            self.fail("Just-deleted table could be selected!")
+            cursor.execute("SELECT * FROM test_deltable")
         except:
             pass
+        else:
+            self.fail("Just-deleted table could be selected!")
     
     def test_nonexistent_delete(self):
         """
         """
         try:
             db.delete_table("test_nonexistdeltable")
-            self.fail("Non-existent table could be deleted!")
         except:
             pass
+        else:
+            self.fail("Non-existent table could be deleted!")
     
     def test_foreign_keys(self):
         """
         db.start_transaction()
         try:
             cursor.execute("SELECT spam FROM test_rn")
-            self.fail("Just-renamed column could be selected!")
         except:
             pass
+        else:
+            self.fail("Just-renamed column could be selected!")
         db.rollback_transaction()
         db.delete_table("test_rn")
         db.start_transaction()
         db.start_transaction()
         try:
             cursor.execute("SELECT eggs FROM test_drn")
-            self.fail("Dry-renamed new column could be selected!")
         except:
             pass
+        else:
+            self.fail("Dry-renamed new column could be selected!")
         db.rollback_transaction()
         db.delete_table("test_drn")
         db.start_transaction()
         db.start_transaction()
         try:
             cursor.execute("SELECT spam FROM testtr")
-            self.fail("Just-renamed column could be selected!")
         except:
             pass
+        else:
+            self.fail("Just-renamed column could be selected!")
         db.rollback_transaction()
         db.delete_table("testtr2")
         db.start_transaction()
         db.start_transaction()
         try:
             db.execute("INSERT INTO test_alter_unique VALUES (1, 42)")
-            self.fail("Could insert the same integer twice into a field with unique=True.")
         except:
             pass
+        else:
+            self.fail("Could insert the same integer twice into a field with unique=True.")
         db.rollback_transaction()
-        
+
         # remove constraint
         db.alter_column("test_alter_unique", "eggs", models.IntegerField())
         # make sure the insertion works now
         db.start_transaction()
         try:
             db.execute("INSERT INTO test_alter_unique VALUES (1, 42)")
-            self.fail("Unique constraint not created during alter_column()")
         except:
             pass
+        else:
+            self.fail("Unique constraint not created during alter_column()")
         db.rollback_transaction()
         
         # Delete the unique index/constraint
         db.add_column("test_add_unique_fk", "mock2", models.OneToOneField(db.mock_model('Mock', 'mock'), null=True))
         
         db.delete_table("test_add_unique_fk")
-
+        
+    def test_column_constraint(self):
+        """
+        Tests that the value constraint of PositiveIntegerField is enforced on
+        the database level.
+        """
+        db.create_table("test_column_constraint", [
+            ('spam', models.PositiveIntegerField()),
+        ])
+        db.execute_deferred_sql()
+        
+        # Make sure we can't insert negative values
+        db.commit_transaction()
+        db.start_transaction()
+        try:
+            db.execute("INSERT INTO test_column_constraint VALUES (-42)")
+        except:
+            pass
+        else:
+            self.fail("Could insert a negative value into a PositiveIntegerField.")
+        db.rollback_transaction()
+        
+        # remove constraint
+        db.alter_column("test_column_constraint", "spam", models.IntegerField())
+        # make sure the insertion works now
+        db.execute('INSERT INTO test_column_constraint VALUES (-42)')
+        db.execute('DELETE FROM test_column_constraint')
+        
+        # add it back again
+        db.alter_column("test_column_constraint", "spam", models.PositiveIntegerField())
+        # it should fail again
+        db.start_transaction()
+        try:
+            db.execute("INSERT INTO test_column_constraint VALUES (-42)")
+        except:
+            pass
+        else:
+            self.fail("Could insert a negative value after changing an IntegerField to a PositiveIntegerField.")
+        db.rollback_transaction()
+        
+        db.delete_table("test_column_constraint")
+        db.start_transaction()
+        
 class TestCacheGeneric(unittest.TestCase):
     base_ops_cls = generic.DatabaseOperations
     def setUp(self):

south/tests/fakeapp/models.py

 from django.db import models
 from django.contrib.auth.models import User as UserAlias
 
+from south.modelsinspector import add_introspection_rules
+
 def default_func():
     return "yays"
 
 # Special case.
 class Other2(models.Model):
     # Try loading a field without a newline after it (inspect hates this)
-    close_but_no_cigar = models.PositiveIntegerField(primary_key=True)
+    close_but_no_cigar = models.PositiveIntegerField(primary_key=True)
+
+class CustomField(models.IntegerField):
+    def __init__(self, an_other_model, **kwargs):
+        super(CustomField, self).__init__(**kwargs)
+        self.an_other_model = an_other_model
+
+add_introspection_rules([
+    (
+        [CustomField],
+        [],
+        {'an_other_model': ('an_other_model', {})},
+    ),
+], ['^south\.tests\.fakeapp\.models\.CustomField'])
+
+class BaseModel(models.Model):
+    pass
+
+class SubModel(BaseModel):
+    others = models.ManyToManyField(Other1)
+    custom = CustomField(Other2)
+
+class CircularA(models.Model):
+    c = models.ForeignKey('CircularC')
+
+class CircularB(models.Model):
+    a = models.ForeignKey(CircularA)
+
+class CircularC(models.Model):
+    b = models.ForeignKey(CircularB)
+
+class Recursive(models.Model):
+    self = models.ForeignKey('self')

south/tests/freezer.py

+import unittest
+
+from south.creator.freezer import model_dependencies
+from south.tests.fakeapp import models
+
+class TestFreezer(unittest.TestCase):
+    def test_dependencies(self):
+        self.assertEqual(set(model_dependencies(models.SubModel)),
+                         set([models.BaseModel, models.Other1, models.Other2]))
+
+        self.assertEqual(set(model_dependencies(models.CircularA)),
+                         set([models.CircularA, models.CircularB, models.CircularC]))
+
+        self.assertEqual(set(model_dependencies(models.Recursive)),
+                         set([models.Recursive]))

south/tests/logic.py

     
     installed_apps = ["fakeapp", "otherfakeapp"]
 
-    def assertListEqual(self, list1, list2):
+    def assertListEqual(self, list1, list2, msg=None):
         list1 = list(list1)
         list2 = list(list2)
         list1.sort()
         list2.sort()
-        return self.assertEqual(list1, list2)
+        return self.assert_(list1 == list2, "%s is not equal to %s" % (list1, list2))
 
     def test_find_ghost_migrations(self):
         pass