Commits

Andrew Godwin committed 8969081 Merge

Merging in development up to 0.6.2

  • Parent commits 8090e35, 554530f
  • Branches 0.7


Files changed (19)

File .hgignore

 # Ignore dist builds and egg info
 dist/*
 South.egg-info/*
+# Ignore test log
+south/tests/test.log

File .hgtags

 35f3f8efa71821515027276919ccd38e1e5f473c migration-refactor
 0ea73a5bb1f67b2c2718c7832065551085dec97e stableish
 2d053731761e7639f7be11fbdefc8d8da2c1bae2 stableish
+751ee7d091f942ca175c870dbbf41f2b2e7f1459 stableish
+751ee7d091f942ca175c870dbbf41f2b2e7f1459 stableish
+95eb414d55616a910edbb299274d3ac4d5d85c3d stableish
+95eb414d55616a910edbb299274d3ac4d5d85c3d 0.6.2
+95eb414d55616a910edbb299274d3ac4d5d85c3d stableish
+7dc7f1ee13e779b509505c4d46b50f40512ca358 stableish
+95eb414d55616a910edbb299274d3ac4d5d85c3d 0.6.2
+7dc7f1ee13e779b509505c4d46b50f40512ca358 0.6.2

File setup.py

 #!/usr/bin/python
 
-from setuptools import setup, find_packages
+from distutils.core import setup
 from south import __version__
 
 setup(

File south/__init__.py

 South - Useable migrations for Django apps
 """
 
-__version__ = "0.6.1"
+__version__ = "0.6.2"
 __authors__ = ["Andrew Godwin <andrew@aeracode.org>", "Andy McCurdy <andy@andymccurdy.com>"]

File south/db/generic.py

 import string
 import random
 import re
+import sys
 
 from django.core.management.color import no_style
 from django.db import connection, transaction, models
 from django.conf import settings
 from django.utils.datastructures import SortedDict
 
+from south.logger import get_logger
 
 def alias(attrname):
     """
         if self.debug:
             print "   = %s" % sql, params
 
+        get_logger().debug('south execute "%s" with params "%s"' % (sql, params))
+        
         if self.dry_run:
             return []
 
         #    fields = fields.items()
         #except AttributeError:
         #    pass
+        
+        if len(table_name) > 63:
+            print "   ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL."
 
         columns = [
             self.column_sql(table_name, field_name, field)
         if self.has_check_constraints:
             check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
             for constraint in check_constraints:
-                self.execute(self.delete_check_sql % {'table':table_name, 'constraint': constraint})
+                self.execute(self.delete_check_sql % {'table': qn(table_name), 'constraint': qn(constraint)})
 
         # First, change the type
         params = {
         if not isinstance(columns, (list, tuple)):
             columns = [columns]
         
-        name = self.create_index_name(table_name, columns)
+        name = self.create_index_name(table_name, columns, suffix="_uniq")
         
         cols = ", ".join(map(qn, columns))
         self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % (qn(table_name), qn(name), cols))
             sqlparams = ()
             # if the field is "NOT NULL" and a default value is provided, create the column with it
             # this allows the addition of a NOT NULL field to a table with existing rows
-            if not field.null and getattr(field, '_suppress_default', True) and field.has_default():
+            if not field.null and not getattr(field, '_suppress_default', False) and field.has_default():
                 default = field.get_default()
                 # If the default is actually None, don't add a default term
                 if default is not None:
                     # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
                     if isinstance(default, basestring):
                         default = "'%s'" % default.replace("'", "''")
-                    elif isinstance(default, datetime.date):
+                    elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)):
                         default = "'%s'" % default
                     sql += " DEFAULT %s"
                     sqlparams = (default)
+            elif (not field.null and field.blank) or ((field.get_default() == '') and (not getattr(field, '_suppress_default', False))):
+                if field.empty_strings_allowed and connection.features.interprets_empty_strings_as_nulls:
+                    sql += " DEFAULT ''"
+                # Error here would be nice, but doesn't seem to play fair.
+                #else:
+                #    raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
 
             if field.rel and self.supports_foreign_keys:
                 self.add_deferred_sql(
 
     def delete_foreign_key(self, table_name, column):
         "Drop a foreign key constraint"
+        qn = connection.ops.quote_name
         if self.dry_run:
             return # We can't look at the DB to get the constraints
         constraints = list(self._constraints_affecting_columns(table_name, [column], "FOREIGN KEY"))
         if not constraints:
             raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column))
         for constraint_name in constraints:
-            self.execute(self.delete_foreign_key_sql % (table_name, constraint_name))
+            self.execute(self.delete_foreign_key_sql % (qn(table_name), qn(constraint_name)))
     
     drop_foreign_key = alias('delete_foreign_key')
 
     
-    def create_index_name(self, table_name, column_names):
+    def create_index_name(self, table_name, column_names, suffix=""):
         """
         Generate a unique name for the index
         """
         index_unique_name = ''
+        
         if len(column_names) > 1:
             index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
         
-        return ('%s_%s%s' % (table_name, column_names[0], index_unique_name))[:self.max_index_name_length]
+        # If the index name is too long, truncate it
+        index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix))
+        if len(index_name) > self.max_index_name_length:
+            part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
+            index_name = '%s%s' % (table_name[:(self.max_index_name_length-len(part))], part)
+        
+        return index_name
 
 
     def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''):

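The reworked create_index_name above truncates from the table-name side, so the column name, hash, and suffix parts always survive. A minimal worked sketch of that logic, assuming max_index_name_length = 63 (PostgreSQL's identifier limit; names illustrative):

    table_name = "a_very_long_application_table_name_that_keeps_on_going"
    part = "_slug_uniq"                 # '_%s%s%s' % (column, hash, suffix)
    index_name = table_name + part
    if len(index_name) > 63:
        # Keep the distinctive tail; trim the table-name prefix instead
        index_name = table_name[:63 - len(part)] + part
    assert len(index_name) <= 63
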
File south/db/mysql.py

         This particular override stops us sending DEFAULTs for BLOB/TEXT columns.
         """
         if field.db_type().upper() in ["BLOB", "TEXT", "LONGTEXT"]:
-            field._suppress_default = False
+            field._suppress_default = True
         return field

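This override pairs with the add_column change in generic.py above: a DEFAULT clause is only emitted when not getattr(field, '_suppress_default', False) holds, so flagging BLOB/TEXT fields makes South skip the clause MySQL would reject. A condensed sketch of the interaction (not the literal control flow):

    # Set by the MySQL backend for types that cannot take a DEFAULT:
    if field.db_type().upper() in ["BLOB", "TEXT", "LONGTEXT"]:
        field._suppress_default = True
    # Checked by the generic backend before appending "DEFAULT %s":
    emit_default = (not field.null
                    and not getattr(field, '_suppress_default', False)
                    and field.has_default())
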
File south/db/sqlite3.py

 import inspect
+import re
+
 from django.db import connection
+from django.db.models import ForeignKey
+
 from south.db import generic
 
+# Derived from how .schema works, as shown at http://www.sqlite.org/sqlite.html
+GET_TABLE_DEF_SQL = """    
+SELECT sql FROM
+       (SELECT * FROM sqlite_master UNION ALL
+        SELECT * FROM sqlite_temp_master)
+    WHERE tbl_name LIKE '%s'
+      AND type!='meta' AND sql NOT NULL AND name NOT LIKE 'sqlite_%%%%'
+    ORDER BY substr(type,2,1), name;"""
+    
 class DatabaseOperations(generic.DatabaseOperations):
 
     """
     # SQLite ignores foreign key constraints. I wish I could.
     supports_foreign_keys = False
     defered_alters = {}
+    def __init__(self):
+        super(DatabaseOperations, self).__init__()
+        # Holds field definitions obtained from the SQL schema. The key is the
+        # table name; the value is a list of two-item lists: [field name, SQL definition].
+        self._fields = {}
+
+    def _populate_current_structure(self, table_name, force=False):
+        # get if we don't have it already or are being forced to refresh it
+        if force or not table_name in self._fields.keys():
+            cursor = connection.cursor()
+            cursor.execute(GET_TABLE_DEF_SQL % table_name)
+            create_table = cursor.fetchall()[0][0]
+            first = create_table.find('(')
+            last = create_table.rfind(')')
+            # rip out the CREATE TABLE xxx ( ) and only get the field definitions plus
+            # add the trailing comma to make the next part easier
+            fields_part = create_table[first+1: last] + ','
+            # pull out the field name and definition for each field
+            self._fields[table_name] = re.findall(r'"(\S+?)"(.*?),', fields_part, re.DOTALL)
+        
+    def _rebuild_table(self, table_name, new_fields):
+        """
+        Rebuilds the table using the new definitions. Only one change
+        can be made per call, and it must be either a rename, an alter,
+        or a delete.
+        """
+        self._populate_current_structure(table_name)
+        
+        current_fields = self._fields[table_name]
+        temp_table_name = '%s_temp' % table_name
+        operation = None
+        changed_field = None
+        
+        if len(current_fields) != len(new_fields):
+            if len(current_fields) - len(new_fields) != 1:
+                raise ValueError('only one field can be deleted at a time, found %s missing fields' % str(len(current_fields) - len(new_fields)))
+            operation = 'delete'
+            current_field_names = [f[0] for f in current_fields]
+            new_field_names = [f[0] for f in new_fields]
+            # find the deleted field
+            for f in current_field_names:
+                if not f in new_field_names:
+                    changed_field = f
+                    break
+        else:
+            found = False
+            for current, new in zip(current_fields, new_fields):
+                if current[0] != new[0]:
+                    if found:
+                        raise ValueError('can only handle one change per call, found more than one')
+                    operation = 'rename'
+                    changed_field = (current[0], new[0])
+                    found = True
+                elif current[1] != new[1]:
+                    if found:
+                        raise ValueError('can only handle one change per call, found more than one')
+                    operation = 'alter'
+                    changed_field = current[0]
+                    found = True
+            if not found:
+                raise ValueError('no change found')
+        # create new table as temp
+        create = 'CREATE TABLE "%s" ( %s )'
+        fields_sql = ','.join(['"%s" %s' % (f[0], f[1]) for f in new_fields])
+        sql = create % (temp_table_name, fields_sql)
+        
+        cursor = connection.cursor()
+        cursor.execute(sql)
+        
+        # copy over data
+        # rename, redef or delete?
+        if operation in ['rename', 'alter']:
+            sql = 'insert into %s select * from %s' % (temp_table_name, table_name)
+        elif operation == 'delete':
+            new_field_names = ','.join(['"%s"' % f[0] for f in new_fields])
+            sql = 'insert into %s select %s from %s' % (temp_table_name, new_field_names, table_name)
+        cursor.execute(sql)
+                                
+        # remove existing table
+        self.delete_table(table_name)
+        
+        # rename new table
+        self.rename_table(temp_table_name, table_name)
+        
+        # repopulate field info
+        self._populate_current_structure(table_name, force=True)
+
     def _defer_alter_sqlite_table(self, table_name, field_renames={}):
         table_renames = self.defered_alters.get(table_name, {})
         table_renames.update(field_renames)
         # Run ALTER TABLE with no unique column
         unique, field._unique, field.db_index = field.unique, False, False
         # If it's not nullable, and has no default, raise an error (SQLite is picky)
-        if not field.null and (not field.has_default() or field.get_default() is None):
+        if (not field.null and 
+            (not field.has_default() or field.get_default() is None) and
+            not field.empty_strings_allowed):
             raise ValueError("You cannot add a null=False column without a default value.")
+        # Don't try and drop the default, it'll fail
+        kwds['keep_default'] = True
         generic.DatabaseOperations.add_column(self, table_name, name, field, *args, **kwds)
         # If it _was_ unique, make an index on it.
         if unique:
         self.delete_table(temp_name, cascade=False)
     
     def alter_column(self, table_name, name, field, explicit_name=True):
-        
-        raise NotImplementedError("The SQLite backend does not yet support alter_column.")
-        # Do initial setup
-        if hasattr(field, 'south_init'):
-            field.south_init()
-        field.set_attributes_from_name(name)
-        
-        self._defer_alter_sqlite_table(table_name, {name: field.column})
+        self._populate_current_structure(table_name)
+        new_fields = []
+        for field_name, field_def in self._fields[table_name]:
+            if field_name == name:
+                if isinstance(field, ForeignKey):
+                    field_name = name[:-3] # exclude the _id when calling column_sql
+                else:
+                    field_name = name
+                new_fields.append((name, self.column_sql(table_name, field_name, field)))
+            else:
+                new_fields.append((field_name, field_def))
+        self._rebuild_table(table_name, new_fields)
+                
 
     def delete_column(self, table_name, column_name):
-        
-        raise NotImplementedError("The SQLite backend does not yet support delete_column.")
-        self._defer_alter_sqlite_table(table_name)
+        self._populate_current_structure(table_name)
+        new_fields = []
+        for field_name, field_def in self._fields[table_name]:
+            if field_name != column_name:
+                new_fields.append((field_name, field_def))
+        self._rebuild_table(table_name, new_fields)
     
     def rename_column(self, table_name, old, new):
-        self._defer_alter_sqlite_table(table_name, {old: new})
-    
+        self._populate_current_structure(table_name)
+        new_fields = []
+        for field_name, field_def in self._fields[table_name]:
+            if field_name == old:
+                new_fields.append((new, field_def))
+            else:
+                new_fields.append((field_name, field_def))
+        self._rebuild_table(table_name, new_fields)
+            
     # Nor unique creation
     def create_unique(self, table_name, columns):
         """
         self.defered_alters = {}
 
         generic.DatabaseOperations.execute_deferred_sql(self)
+
+    

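Since SQLite has no ALTER COLUMN or DROP COLUMN, all three operations above now funnel into _rebuild_table, which creates a temp table from the new definitions, copies the rows across, drops the original, and renames the copy back. A hedged usage sketch (table and field names illustrative):

    from django.db import models
    from south.db import db

    db.rename_column("myapp_entry", "titel", "title")   # one rename per call
    db.delete_column("myapp_entry", "obsolete")         # one delete per call
    db.alter_column("myapp_entry", "count",
                    models.IntegerField(null=True))     # one alter per call

Each call is a full table rebuild, so these are markedly more expensive than the same operations on other backends.
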
File south/introspection_plugins/geodjango.py

 
 from south.modelsinspector import add_introspection_rules
 
-
-def has_spatialite():
-    "Checks for the presence of SpataiLite"
-    try:
-        from ctypes.util import find_library
-    except ImportError:
-        return False
-    from django.conf import settings
-    return bool(getattr(settings, 'SPATIALITE_LIBRARY_PATH', find_library('spatialite')))
-
-
-def has_geos():
-    try:
-        from django.contrib.gis.geos import libgeos
-    except (ImportError, OSError):
-        return False
-    else:
-        return True
-
-
-# First, work out if GIS is enabled
-# (If it isn't importing the field will fail)
-has_gis = has_geos() and \
-          ((settings.DATABASE_ENGINE in ["postgresql", "postgresql_psycopg2", "mysql"]) or \
-          (settings.DATABASE_ENGINE == "sqlite3" and has_spatialite()))
+has_gis = "django.contrib.gis" in settings.INSTALLED_APPS
 
 if has_gis:
     # Alright, import the field
                 [],
                 {
                     "srid": ["_srid", {"default": 4326}],
-                    "spatial_index": ["_spatial_index", {"default": True}],
+                    "spatial_index": ["_index", {"default": True}],
                     "dim": ["_dim", {"default": 2}],
                 },
             ),

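The GIS check is thus reduced from probing for GEOS and SpatiaLite to a plain settings lookup; roughly, enabling South's GIS introspection rules now only takes (a settings.py sketch):

    INSTALLED_APPS = [
        # ...
        "django.contrib.gis",   # its presence alone now enables the rules
        "south",
    ]
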
File south/logger.py

+import sys
+import logging
+from django.conf import settings
+
+class NullHandler(logging.Handler):
+    def emit(self, record):
+        pass
+
+h = NullHandler()
+
+_logger = logging.getLogger("south")
+_logger.addHandler(h)
+_logger.setLevel(logging.DEBUG)
+# TODO: Add a log formatter?
+
+def get_logger():
+    debug_on = getattr(settings, "SOUTH_LOGGING_ON", False)
+    logging_file = getattr(settings, "SOUTH_LOGGING_FILE", False)
+    
+    if debug_on:
+        if logging_file:
+            _logger.addHandler( logging.FileHandler(logging_file) )
+            _logger.setLevel(logging.DEBUG)
+        else:
+            raise IOError, "SOUTH_LOGGING_ON is True. You also need a SOUTH_LOGGING_FILE setting."
+    return _logger

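For reference, turning the new logging on from a project's settings.py looks like this (log path illustrative); per get_logger above, setting SOUTH_LOGGING_ON without SOUTH_LOGGING_FILE raises IOError:

    SOUTH_LOGGING_ON = True
    SOUTH_LOGGING_FILE = "/tmp/south.log"   # each db.execute() is logged here
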
File south/management/commands/convert_to_south.py

         
         print 
         print "App '%s' converted. Note that South assumed the application's models matched the database" % app
-        print "(i.e. you haven't changed it since last syncdb); if you have, you should delete the %s/migrations"
+        print "(i.e. you haven't changed it since last syncdb); if you have, you should delete the %s/migrations" % app
         print "directory, revert models.py so it matches the database, and try again."

File south/management/commands/startmigration.py

             help='Generate a Create Table migration for the specified model.  Add multiple models to this migration with subsequent --model parameters.'),
         make_option('--add-field', action='append', dest='added_field_list', type='string',
             help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
+        make_option('--add-index', action='append', dest='added_index_list', type='string',
+            help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one index.'),
         make_option('--initial', action='store_true', dest='initial', default=False,
             help='Generate the initial schema for the app.'),
         make_option('--auto', action='store_true', dest='auto', default=False,
     help = "Creates a new template migration for the given app"
     usage_str = "Usage: ./manage.py startmigration appname migrationname [--initial] [--auto] [--model ModelName] [--add-field ModelName.field_name] [--freeze] [--stdout]"
     
-    def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, **options):
+    def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options):
         
         # Any supposed lists that are None become empty lists
         added_model_list = added_model_list or []
         added_field_list = added_field_list or []
-        
+        added_index_list = added_index_list or []
+
         # --stdout means name = -
         if stdout:
             name = "-"
                 pass
         
         # Make the new filename
-        new_filename = "%04i%s_%s.py" % (
+        new_filename = "%04i_%s.py" % (
             highest_number + 1,
-            "".join([random.choice(string.letters.lower()) for i in range(0)]), # Possible random stuff insertion
             name,
         )
         
         changed_fields = [] # (mkey, fname, old_def, new_def)
         added_uniques = set() # (mkey, field_names)
         deleted_uniques = set() # (mkey, field_names)
+
+        added_indexes = set()
+        deleted_indexes = []
+        
         
         # --initial means 'add all models in this app'.
         if initial:
             else:
                 added_fields.add(("%s.%s" % (app, modelname), fieldname))
         
+        # same thing as above, but for indexes
+        for fielddef in added_index_list:
+            try:
+                modelname, fieldname = fielddef.split(".", 1)
+            except ValueError:
+                print "The field specification '%s' is not in modelname.fieldname format." % fielddef
+            else:
+                added_indexes.add(("%s.%s" % (app, modelname), fieldname))
+        
         # Add anything frozen (I almost called the dict Iceland...)
         if freeze_list:
             for item in freeze_list:
             # Add items to the todo lists
             added_models.update(am)
             added_fields.update(af)
-            changed_fields.extend(cf)
+            changed_fields.extend([(m, fn, ot, nt, last_orm) for m, fn, ot, nt in cf])
             
             # Deleted models are from the past, and so we use instances instead.
             for mkey in dm:
                 model._meta.app_label,
                 model._meta.object_name,
             )
-        
+
+        ### Added indexes. Placed here, since this may add to added_uniques ###
+        for mkey, field_name in added_indexes:
+            # Get the model
+            model = model_unkey(mkey)
+            # Get the field
+            try:
+                field = model._meta.get_field(field_name)
+            except FieldDoesNotExist:
+                print "Model '%s' doesn't have a field '%s'" % (mkey, field_name)
+                return
+
+            if field.unique:
+                ut = (mkey, (field.name,))
+                added_uniques.add(ut)
+
+            elif field.db_index:
+                # Create migrations
+                forwards += CREATE_INDEX_SNIPPET % (
+                    model._meta.object_name,
+                    field.name,
+                    model._meta.db_table,
+                    field.name,
+                )
+
+                backwards += DELETE_INDEX_SNIPPET % (
+                    model._meta.object_name,
+                    field.name,
+                    model._meta.db_table,
+                    field.column,
+                )
+                print " + Added index for '%s.%s'" % (mkey, field_name)
+
+            else:
+                print "Field '%s.%s' does not have db_index or unique set to True" % (mkey, field_name)
+                return
         
         ### Changed fields ###
-        for mkey, field_name, old_triple, new_triple in changed_fields:
+        for mkey, field_name, old_triple, new_triple, last_orm in changed_fields:
             
             model = model_unkey(mkey)
+            
             old_def = triples_to_defs(app, model, {
                 field_name: old_triple,
             })[field_name]
                 field_name: new_triple,
             })[field_name]
             
-            # We need to create the field, to see if it needs _id, or if it's an M2M
+            # We need to create the fields, to see if they need _id, or are M2M
             field = model._meta.get_field_by_name(field_name)[0]
+            old_field = last_orm[mkey + ":" + field_name]
+            
+            if field.column != old_field.column:
+                forwards += RENAME_COLUMN_SNIPPET % {
+                    "field_name": field_name,
+                    "old_column": old_field.column,
+                    "new_column": field.column,
+                }
             
             if hasattr(field, "m2m_db_table"):
                 # See if anything has ACTUALLY changed
                 field.get_attname(),
                 "orm[%r]" % (mkey + ":" + field.name),
             )
+            
+            if field.column != old_field.column:
+                backwards += RENAME_COLUMN_SNIPPET % {
+                    "field_name": field_name,
+                    "old_column": field.column,
+                    "new_column": old_field.column,
+                }
         
         
         ### Added unique_togethers ###
         
         # Fill out frozen model definitions
         for model, last_models in frozen_models.items():
+            if hasattr(model._meta, "proxy") and model._meta.proxy:
+                model = model._meta.proxy_for_model
             all_models[model_key(model)] = prep_for_freeze(model, last_models)
         
         # Do some model cleanup, and warnings
 
 
 USELESS_KEYWORDS = ["choices", "help_text", "upload_to", "verbose_name"]
-USELESS_DB_KEYWORDS = ["related_name"] # Important for ORM, not for DB.
+USELESS_DB_KEYWORDS = ["related_name", "default"] # Important for ORM, not for DB.
 
 def remove_useless_attributes(field, db=False):
     "Removes useless (for database) attributes from the field's defn."
         # Deleting unique_together for [%s] on %s.
         db.delete_unique(%r, %r)
         '''
+RENAME_COLUMN_SNIPPET = '''
+        # Renaming column for field '%(field_name)s'.
+        db.rename_column(%(old_column)r, %(new_column)r)
+        '''
 FIELD_NEEDS_DEF_SNIPPET = "<< PUT FIELD DEFINITION HERE >>"
+
+CREATE_INDEX_SNIPPET = '''
+        # Adding index on '%s.%s'
+        db.create_index(%r, [%r])
+        '''
+DELETE_INDEX_SNIPPET = '''
+        # Deleting index on '%s.%s'
+        db.delete_index(%r, [%r])
+        '''

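Tying the new --add-index option together: a command like ./manage.py startmigration myapp add_entry_slug_index --add-index Entry.slug (names illustrative) should, for a field with db_index=True, produce forwards/backwards bodies built from the snippets above, roughly:

    def forwards(self, orm):
        # Adding index on 'Entry.slug'
        db.create_index('myapp_entry', ['slug'])

    def backwards(self, orm):
        # Deleting index on 'Entry.slug'
        db.delete_index('myapp_entry', ['slug'])
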
File south/migration/__init__.py

File contents unchanged.

File south/modelsinspector.py

 from south.utils import get_attribute
 
 from django.db import models
-from django.db.models.base import ModelBase
+from django.db.models.base import ModelBase, Model
 from django.db.models.fields import NOT_PROVIDED
 from django.conf import settings
 from django.utils.functional import Promise
     attrname, options = descriptor
     value = get_attribute(field, attrname)
     # Lazy-eval functions get eval'd.
-    # Annoyingly, we can't do an isinstance() test
     if isinstance(value, Promise):
         value = unicode(value)
     # If the value is the same as the default, omit it for clarity
         default_value = format % tuple(map(lambda x: get_attribute(field, x), attrs))
         if value == default_value:
             raise IsDefault
+    # Callables get called.
+    if callable(value) and not isinstance(value, ModelBase):
+        # Datetime.datetime.now is special, as we can access it from the eval
+        # context (and because it changes all the time; people will file bugs otherwise).
+        if value == datetime.datetime.now:
+            return "datetime.datetime.now"
+        if value == datetime.datetime.utcnow:
+            return "datetime.datetime.utcnow"
+        if value == datetime.date.today:
+            return "datetime.date.today"
+        # All other callables get called.
+        value = value()
     # Models get their own special repr()
     if isinstance(value, ModelBase):
         # If it's a proxy model, follow it back to its non-proxy parent
         if getattr(value._meta, "proxy", False):
             value = value._meta.proxy_for_model
         return "orm['%s.%s']" % (value._meta.app_label, value._meta.object_name)
-    # Callables get called.
-    elif callable(value):
-        # Datetime.datetime.now is special, as we can access it from the eval
-        # context (and because it changes all the time; people will file bugs otherwise).
-        if value == datetime.datetime.now:
-            return "datetime.datetime.now"
-        if value == datetime.date.today:
-            return "datetime.date.today"
-        # All other callables get called.
-        value = value()
+    # As do model instances
+    if isinstance(value, Model):
+        return "orm['%s.%s'].objects.get(pk=%r)" % (value.__class__._meta.app_label, value.__class__._meta.object_name, value.pk)
     # Now, apply the converter func if there is one
     if "converter" in options:
         value = options['converter'](value)
     # Now, ask the parser to have a look at this model too.
     try:
         parser_fields = modelsparser.get_model_fields(model, m2m) or {}
-    except TypeError: # Almost certainly a not-real module
+    except (TypeError, IndentationError): # Almost certainly a not-real module
         parser_fields = {}
     
     # Now, go through all the fields and try to get their definition
     return meta_def
 
 # Now, load the built-in South introspection plugins
-import south.introspection_plugins
+import south.introspection_plugins

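The reordering above means callables are now resolved before the model checks, and model instances gain their own freezing rule. In effect (a sketch; field definitions illustrative):

    models.DateTimeField(default=datetime.datetime.now)
    # -> frozen as the string "datetime.datetime.now", kept evaluatable
    models.DateTimeField(default=datetime.datetime.utcnow)
    # -> "datetime.datetime.utcnow" (newly special-cased in this commit)
    models.IntegerField(default=lambda: 42)
    # -> any other callable is called and its return value frozen
    # Model-instance values freeze to a lookup such as:
    # "orm['app.Model'].objects.get(pk=1)"
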
File south/modelsparser.py

 
 def get_model_tree(model):
     # Get the source of the model's file
-    source = open(inspect.getsourcefile(model)).read().replace("\r\n", "\n").replace("\r","\n") + "\n"
+    try:
+        source = inspect.getsource(model).replace("\r\n", "\n").replace("\r","\n") + "\n"
+    except IOError:
+        return None
     tree = STTree(parser.suite(source).totuple())
     # Now, we have to find it
     for poss in tree.find("compound_stmt"):

File south/tests/__init__.py

 
 import unittest
+import os
+import sys
 from django.conf import settings
 from south.hacks import hacks
 
+# Add the tests directory so fakeapp is on sys.path
+test_root = os.path.dirname(__file__)
+sys.path.append(test_root)
+
 # Note: the individual test files are imported below this.
 
 class Monkeypatcher(unittest.TestCase):
     from south.tests.db import *
     from south.tests.logic import *
     from south.tests.autodetection import *
+    from south.tests.logger import *
+    from south.tests.inspector import *

File south/tests/db.py

         Test = db.mock_model(model_name='Test', db_table='test5a',
                              db_tablespace='', pk_field_name='ID',
                              pk_field_type=models.AutoField, pk_field_args=[])
-        cursor = connection.cursor()
         db.start_transaction()
         db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))])
         db.create_table("test5b", [
         db.add_column("test4", "add1", models.IntegerField(default=3), keep_default=False)
         # Add a FK with keep_default=False (#69)
         User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
-        db.add_column("test4", "user", models.ForeignKey(User), keep_default=False)
+        db.add_column("test4", "user", models.ForeignKey(User, null=True), keep_default=False)
         db.delete_column("test4", "add1")
         
         db.rollback_transaction()
             self.fail("Could insert non-unique pair.")
         db.delete_unique("test_unique", ["spam", "eggs", "ham_id"])
     
+    def test_capitalised_constraints(self):
+        """
+        Under PostgreSQL at least, capitalised constraints must be quoted.
+        """
+        db.start_transaction()
+        try:
+            db.create_table("test_capconst", [
+                ('SOMECOL', models.PositiveIntegerField(primary_key=True)),
+            ])
+            # Alter it so it's not got the check constraint
+            db.alter_column("test_capconst", "SOMECOL", models.IntegerField())
+        finally:
+            db.rollback_transaction()
+    
+    def test_text_default(self):
+        """
+        MySQL cannot have blank defaults on TEXT columns.
+        """
+        db.start_transaction()
+        try:
+            db.create_table("test_textdef", [
+                ('textcol', models.TextField(blank=True)),
+            ])
+        finally:
+            db.rollback_transaction()
+    
     def test_add_unique_fk(self):
         """
         Test adding a ForeignKey with unique=True or a OneToOneField

File south/tests/inspector.py

+import unittest
+
+from south.tests import Monkeypatcher
+from south.modelsinspector import *
+from fakeapp.models import HorribleModel
+
+class TestModelInspector(Monkeypatcher):
+
+    """
+    Tests whether the various parts of modelsinspector work.
+    """
+    
+    def test_get_value(self):
+        
+        # Let's start nicely.
+        name = HorribleModel._meta.get_field_by_name("name")[0]
+        slug = HorribleModel._meta.get_field_by_name("slug")[0]
+        user = HorribleModel._meta.get_field_by_name("user")[0]
+        
+        # Simple int retrieval
+        self.assertEqual(
+            get_value(name, ["max_length", {}]),
+            "255",
+        )
+        
+        # Bool retrieval
+        self.assertEqual(
+            get_value(slug, ["unique", {}]),
+            "True",
+        )
+        
+        # String retrieval
+        self.assertEqual(
+            get_value(user, ["rel.related_name", {}]),
+            "'horribles'",
+        )
+        
+        # Default triggering
+        self.assertEqual(
+            get_value(slug, ["unique", {"default": False}]),
+            "True",
+        )
+        self.assertRaises(
+            IsDefault,
+            get_value,
+            slug,
+            ["unique", {"default": True}],
+        )
+    

File south/tests/logger.py

+import os
+import unittest
+from django.conf import settings
+from django.db import connection, models
+
+from south.db import db
+
+# 
+# # Create a list of error classes from the various database libraries
+# errors = []
+# try:
+#     from psycopg2 import ProgrammingError
+#     errors.append(ProgrammingError)
+# except ImportError:
+#     pass
+# errors = tuple(errors)
+
+class TestLogger(unittest.TestCase):
+
+    """
+    Tests the various logging functions.
+    """
+    def setUp(self):
+        db.debug = False
+        self.test_path = os.path.join(os.path.dirname(__file__),"test.log")
+        
+    def test_db_execute_logging_nofile(self):
+        """ Does logging degrade nicely if SOUTH_DEBUG_ON not set?
+        """
+        # This needs to be set to False to avoid issues where other tests set
+        # it to True; the settings object is shared between these tests.
+        settings.SOUTH_LOGGING_ON = False
+        db.create_table("test9", [('email_confirmed', models.BooleanField(default=False))])
+        
+    def test_db_execute_logging_validfile(self):
+        """ Does logging work when passing in a valid file?
+        """
+        settings.SOUTH_LOGGING_ON = True
+        settings.SOUTH_LOGGING_FILE = self.test_path
+        db.create_table("test10", [('email_confirmed', models.BooleanField(default=False))])
+        
+        # remove the test log file
+        os.remove(self.test_path) 
+
+    def test_db_execute_logging_missingfilename(self):
+        """ Does logging raise an error if there is a missing filename?
+        """
+        settings.SOUTH_LOGGING_ON = True
+        settings.SOUTH_LOGGING_FILE = None
+        self.assertRaises(IOError,
+            db.create_table, "test11", [('email_confirmed', models.BooleanField(default=False))])
+        
+        

File south/tests/logic.py

 from south.models import MigrationHistory
 from south.tests import Monkeypatcher
 
-# Add the tests directory so fakeapp is on sys.path
-test_root = os.path.dirname(__file__)
-sys.path.append(test_root)
-
-
 class TestMigration(Monkeypatcher):
     installed_apps = ["fakeapp", "otherfakeapp", "brokenapp"]