Kris Kumler committed c4406ff

Fix #1230 - MySQL CREATE TABLE storage engine

Comments (0)

Files changed (2)

south/db/generic.py

     add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
     delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
     delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
+    create_table_sql = 'CREATE TABLE %(table)s (%(columns)s)'
     max_index_name_length = 63
     drop_index_string = 'DROP INDEX %(index_name)s'
     delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
     rename_table_sql = "ALTER TABLE %s RENAME TO %s;"
     backend_name = None
     default_schema_name = "public"
-    
+
     # Features
     allows_combined_alters = True
     supports_foreign_keys = True
             return self._get_setting('schema')
         except (KeyError, AttributeError):
             return self.default_schema_name
-    
+
     def _possibly_initialise(self):
         if not self._initialised:
             self.connection_init()
         e.g. which storage engine (MySQL) or transaction serialisability level.
         """
         pass
-    
+
     def quote_name(self, name):
         """
         Uses the database backend to quote the given table/column name.
     def _print_sql_error(self, e, sql, params=[]):
         print('FATAL ERROR - The following SQL query failed: %s' % sql, file=sys.stderr)
         print('The error was: %s' % e, file=sys.stderr)
-        
+
     def execute(self, sql, params=[], print_all_errors=True):
         """
         Executes the given SQL statement, with optional parameters.
         If the instance's debug attribute is True, prints out what it executes.
         """
-        
+
         self._possibly_initialise()
-        
+
         cursor = self._get_connection().cursor()
         if self.debug:
             print("   = %s" % sql, params)
             for field_name, field in fields
         ]
 
-        self.execute('CREATE TABLE %s (%s);' % (
-            self.quote_name(table_name),
-            ', '.join([col for col in columns if col]),
-        ))
+        self.execute(self.create_table_sql % {
+            "table": self.quote_name(table_name),
+            "columns": ', '.join([col for col in columns if col]),
+        })
 
     add_table = alias('create_table')  # Alias for consistency's sake
 
             return field.db_type(connection=self._get_connection())
         except TypeError:
             return field.db_type()
-        
+
     def _alter_add_column_mods(self, field, name, params, sqls):
         """
         Subcommand of alter_column that modifies column definitions beyond
         @param name: The name of the column to alter
         @param field: The new field definition to use
         """
-        
+
         if self.dry_run:
             if self.debug:
                 print('   - no dry run output for alter_column() due to dynamic DDL, sorry')
                         'table': self.quote_name(table_name),
                         'constraint': self.quote_name(constraint),
                     })
-        
+
             # Drop all foreign key constraints
             try:
                 self.delete_foreign_key(table_name, name)
 
         # SQLs is a list of (SQL, values) pairs.
         sqls = []
-        
+
         # Only alter the column if it has a type (Geometry ones sometimes don't)
         if params["type"] is not None:
             sqls.append((self.alter_string_set_type % params, []))
-        
+
         # Add any field- and backend- specific modifications
         self._alter_add_column_mods(field, name, params, sqls)
         # Next, nullity
         if not field.null and field.has_default():
             # Final fixes
             self._update_nulls_to_default(params, field)
-            self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), self.alter_string_drop_null % params), [])            
-        
+            self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), self.alter_string_drop_null % params), [])
+
         if not ignore_constraints:
             # Add back FK constraints if needed
             if field.rel and self.supports_foreign_keys:
             sql = field.db_type(connection=self._get_connection())
         except TypeError:
             sql = field.db_type()
-        
+
         if sql:
-            
+
             # Some callers, like the sqlite stuff, just want the extended type.
             if with_name:
                 field_output = [self.quote_name(field.column), sql]
             else:
                 field_output = [sql]
-            
+
             field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
             if field.primary_key:
                 field_output.append('PRIMARY KEY')
                 # We must specify the index tablespace inline, because we
                 # won't be generating a CREATE INDEX statement for this field.
                 field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))
-            
+
             sql = ' '.join(field_output)
             sqlparams = ()
             # if the field is "NOT NULL" and a default value is provided, create the column with it
                         # If the default is a callable, then call it!
                         if callable(default):
                             default = default()
-                            
+
                         default = field.get_db_prep_save(default, connection=self._get_connection())
                         default = self._default_value_workaround(default)
                         # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
         if hasattr(field, 'post_create_sql'):
             for stmt in field.post_create_sql(no_style(), table_name):
                 self.add_deferred_sql(stmt)
-        
+
         # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
         # This also creates normal indexes in 1.1.
         if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
             model = self.mock_model("FakeModelForGISCreation", table_name)
             for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
                 self.add_deferred_sql(stmt)
-        
+
         if sql:
             return sql % sqlparams
         else:
             if self.debug:
                 print('   - no dry run output for delete_primary_key() due to dynamic DDL, sorry')
             return
-        
+
         constraints = list(self._constraints_affecting_columns(table_name, None, type="PRIMARY KEY"))
         if not constraints:
             raise ValueError("Cannot find a PRIMARY KEY constraint on table %s" % (table_name,))
-        
+
         for constraint in constraints:
             self.execute(self.delete_primary_key_sql % {
                 "table": self.quote_name(table_name),
                 "constraint": self.quote_name(constraint),
             })
-    
+
     drop_primary_key = alias('delete_primary_key')
 
     @invalidate_table_constraints
         Find all columns of the primary key of the specified table
         """
         db_name = self._get_setting('NAME')
-        
+
         primary_key_columns = set()
         for col, constraints in self.lookup_constraint(db_name, table_name):
             for kind, cname in constraints:
                 if kind == 'PRIMARY KEY':
                     primary_key_columns.add(col.lower())
-                    
+
         return primary_key_columns
 
     def start_transaction(self):
         over all models within the app sending the signal.  This is a
        patch we should push Django to make. For now, this should work.
         """
-        
+
         if self.debug:
             print(" - Sending post_syncdb signal for %s: %s" % (app_label, model_names))
-        
+
         app = models.get_app(app_label)
         if not app:
             return
         if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
             return super_result.split(" ", 1)[0]
         return super_result
-        
+
     def _alter_add_positive_check(self, klass, field, name, params, sqls):
         """
         A helper for subclasses overriding _alter_add_column_mods:

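Note on the generic.py change: create_table() now renders its statement from the class-level create_table_sql template (and the old trailing semicolon is dropped) so that subclasses can append backend-specific clauses. A minimal illustration of how the template is filled; the table name and column definitions below are invented, not taken from the commit:

    create_table_sql = 'CREATE TABLE %(table)s (%(columns)s)'
    columns = [
        '`id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY',
        '`title` varchar(200) NOT NULL',
    ]
    statement = create_table_sql % {
        "table": '`books`',
        "columns": ', '.join(col for col in columns if col),
    }
    # -> CREATE TABLE `books` (`id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, `title` varchar(200) NOT NULL)
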
south/db/mysql.py

         self._constraint_references = {}
         self._reverse_cache = {}
         super(DatabaseOperations, self).__init__(db_alias)
+        if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
+            self.create_table_sql += ' ENGINE=%s' % self._get_setting('STORAGE_ENGINE')
 
     def _is_valid_cache(self, db_name, table_name):
         cache = self._constraint_cache
         super(DatabaseOperations, self).delete_column(table_name, name)
 
     @invalidate_table_constraints
+    def create_table(self, table_name, fields):
+        super(DatabaseOperations, self).create_table(table_name, fields)
+
+    @invalidate_table_constraints
     def rename_table(self, old_table_name, table_name):
         super(DatabaseOperations, self).rename_table(old_table_name,
                 table_name)
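
Usage sketch (not part of the commit): with this change, South appends an ENGINE clause to every CREATE TABLE it issues for a MySQL database whose settings provide a non-empty STORAGE_ENGINE value. The settings snippet below is hypothetical, and assumes _get_setting('STORAGE_ENGINE') resolves a key of that name in the per-database settings dict:

    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.mysql',
            'NAME': 'myapp',
            'USER': 'myapp',
            'PASSWORD': 'secret',
            # Hypothetical key; when present and non-empty, create_table_sql
            # becomes 'CREATE TABLE %(table)s (%(columns)s) ENGINE=INNODB'
            'STORAGE_ENGINE': 'INNODB',
        },
    }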