Commits

Mike Bayer committed b91c1cc Merge

- an enormous merge just because I committed a one-line log entry. The joy of DVCS.


Files changed (134)

lib/sqlalchemy/connectors/__init__.py

 
 class Connector(object):
     pass
-

lib/sqlalchemy/connectors/mxodbc.py

 
 from . import Connector
 
+
 class MxODBCConnector(Connector):
-    driver='mxodbc'
+    driver = 'mxodbc'
 
     supports_sane_multi_rowcount = False
     supports_unicode_statements = True
         elif platform == 'darwin':
             from mx.ODBC import iODBC as module
         else:
-            raise ImportError, "Unrecognized platform for mxODBC import"
+            raise ImportError("Unrecognized platform for mxODBC import")
         return module
 
     @classmethod
         emit Python standard warnings.
         """
         from mx.ODBC.Error import Warning as MxOdbcWarning
+
         def error_handler(connection, cursor, errorclass, errorvalue):
-
             if issubclass(errorclass, MxOdbcWarning):
                 errorclass.__bases__ = (Warning,)
                 warnings.warn(message=str(errorvalue),
             return True
 
     def do_executemany(self, cursor, statement, parameters, context=None):
-        cursor.executemany(statement, parameters, direct=self._get_direct(context))
+        cursor.executemany(
+            statement, parameters, direct=self._get_direct(context))
 
     def do_execute(self, cursor, statement, parameters, context=None):
         cursor.execute(statement, parameters, direct=self._get_direct(context))
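
Beyond whitespace cleanup, the mxodbc.py hunks above replace the Python-2-only "raise ImportError, ..." statement with the call form and re-wrap the handler that routes mx.ODBC warnings through Python's warnings module. The following is a minimal, self-contained sketch of that routing idiom; the MxOdbcWarning stand-in and the direct call at the end are illustrative, not the real mx.ODBC objects.

    import warnings

    class MxOdbcWarning(Exception):
        # stand-in for mx.ODBC.Error.Warning; the real class comes from the DBAPI
        pass

    def error_handler(connection, cursor, errorclass, errorvalue):
        # DBAPI warning classes are re-parented onto Python's Warning hierarchy
        # and emitted through warnings.warn(); anything else is re-raised.
        if issubclass(errorclass, MxOdbcWarning):
            errorclass.__bases__ = (Warning,)
            warnings.warn(message=str(errorvalue), category=errorclass)
        else:
            raise errorclass(errorvalue)

    error_handler(None, None, MxOdbcWarning, "server reported a truncated value")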

lib/sqlalchemy/connectors/mysqldb.py

 from .. import exc, log, schema, sql, types as sqltypes, util, processors
 import re
 
+
 # the subclassing of Connector by all classes
 # here is not strictly necessary
 
+
 class MySQLDBExecutionContext(Connector):
 
     @property
         else:
             return self.cursor.rowcount
 
+
 class MySQLDBCompiler(Connector):
     def visit_mod_binary(self, binary, operator, **kw):
         return self.process(binary.left, **kw) + " %% " + \
     def post_process_text(self, text):
         return text.replace('%', '%%')
 
+
 class MySQLDBIdentifierPreparer(Connector):
 
     def _escape_identifier(self, value):
         value = value.replace(self.escape_quote, self.escape_to_quote)
         return value.replace("%", "%%")
 
+
 class MySQLDBConnector(Connector):
     driver = 'mysqldb'
     supports_unicode_statements = False
         # query string.
 
         ssl = {}
-        for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']:
+        keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']
+        for key in keys:
             if key in opts:
                 ssl[key[4:]] = opts[key]
                 util.coerce_kw_type(ssl, key[4:], str)
                     "combination of MySQL server and MySQL-python. "
                     "MySQL-python >= 1.2.2 is recommended.  Assuming latin1.")
                 return 'latin1'
-
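
In the mysqldb connector hunk above, the reformatted loop gathers any ssl_-prefixed URL options into the dict that MySQLdb expects for its ssl connect argument, with key[4:] stripping the leading "ssl_". A tiny sketch of that idiom with made-up values; plain str() stands in for util.coerce_kw_type here.

    opts = {'ssl_ca': '/etc/ssl/ca.pem', 'ssl_cert': '/etc/ssl/client.pem',
            'charset': 'utf8'}   # illustrative URL query options

    ssl = {}
    for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']:
        if key in opts:
            ssl[key[4:]] = str(opts[key])   # 'ssl_ca' -> 'ca', coerced to str

    print(ssl)   # {'ca': '/etc/ssl/ca.pem', 'cert': '/etc/ssl/client.pem'}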

lib/sqlalchemy/connectors/pyodbc.py

 import re
 import urllib
 
+
 class PyODBCConnector(Connector):
-    driver='pyodbc'
+    driver = 'pyodbc'
 
     supports_sane_multi_rowcount = False
     # PyODBC unicode is broken on UCS-4 builds
             dsn_connection = 'dsn' in keys or \
                             ('host' in keys and 'database' not in keys)
             if dsn_connection:
-                connectors= ['dsn=%s' % (keys.pop('host', '') or \
+                connectors = ['dsn=%s' % (keys.pop('host', '') or \
                             keys.pop('dsn', ''))]
             else:
                 port = ''
                 connectors = ["DRIVER={%s}" %
                                 keys.pop('driver', self.pyodbc_driver_name),
                               'Server=%s%s' % (keys.pop('host', ''), port),
-                              'Database=%s' % keys.pop('database', '') ]
+                              'Database=%s' % keys.pop('database', '')]
 
             user = keys.pop("user", None)
             if user:
                 connectors.append("AutoTranslate=%s" %
                                     keys.pop("odbc_autotranslate"))
 
-            connectors.extend(['%s=%s' % (k,v) for k,v in keys.iteritems()])
-        return [[";".join (connectors)], connect_args]
+            connectors.extend(['%s=%s' % (k, v) for k, v in keys.iteritems()])
+        return [[";".join(connectors)], connect_args]
 
     def is_disconnect(self, e, connection, cursor):
         if isinstance(e, self.dbapi.ProgrammingError):
                             ))
 
         if self.freetds:
-            self.freetds_driver_version = dbapi_con.getinfo(pyodbc.SQL_DRIVER_VER)
+            self.freetds_driver_version = dbapi_con.getinfo(
+                pyodbc.SQL_DRIVER_VER)
 
         # the "Py2K only" part here is theoretical.
         # have not tried pyodbc + python3.1 yet.
         # Py2K
-        self.supports_unicode_statements = not self.freetds and not self.easysoft
+        self.supports_unicode_statements = (
+            not self.freetds and not self.easysoft)
         if self._user_supports_unicode_binds is not None:
             self.supports_unicode_binds = self._user_supports_unicode_binds
         else:
-            self.supports_unicode_binds = (not self.freetds or
-                                            self.freetds_driver_version >= '0.91'
-                                            ) and not self.easysoft
+            self.supports_unicode_binds = (
+                not self.freetds or self.freetds_driver_version >= '0.91'
+            ) and not self.easysoft
         # end Py2K
 
         # run other initialization which asks for user name, etc.
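
The pyodbc connector hunks above mostly re-wrap the code that assembles an ODBC connection string: known URL fields become DRIVER/Server/Database entries, leftover query arguments are appended as KEY=value pairs, and the pieces are joined with semicolons. A rough sketch of that assembly with invented values; the driver name, host, and credentials are all placeholders.

    keys = {'host': 'db.example.com', 'port': 1433, 'database': 'sales',
            'user': 'scott', 'password': 'tiger',
            'TrustServerCertificate': 'yes'}   # leftover query args pass through

    connectors = ["DRIVER={%s}" % 'SQL Server',
                  'Server=%s,%s' % (keys.pop('host'), keys.pop('port')),
                  'Database=%s' % keys.pop('database'),
                  'UID=%s' % keys.pop('user'),
                  'PWD=%s' % keys.pop('password')]
    connectors.extend('%s=%s' % (k, v) for k, v in keys.items())

    print(";".join(connectors))
    # DRIVER={SQL Server};Server=db.example.com,1433;Database=sales;UID=scott;
    # PWD=tiger;TrustServerCertificate=yes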

lib/sqlalchemy/connectors/zxJDBC.py

 import sys
 from . import Connector
 
+
 class ZxJDBCConnector(Connector):
     driver = 'zxjdbc'
 

lib/sqlalchemy/dialects/__init__.py

 
 from .. import util
 
+
 def _auto_fn(name):
     """default dialect importer.
 
     else:
         return None
 
-registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
+registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
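
The dialects/__init__.py hunk pairs the _auto_fn default importer with util.PluginLoader: dialect names are looked up in a registry first, and the auto_fn callback gets a chance to import and register unknown names lazily. Below is a stand-in sketch of that registry-with-fallback pattern; PluginRegistry and the lambda are illustrative, not the real PluginLoader API.

    class PluginRegistry(object):
        def __init__(self, auto_fn=None):
            self.impls = {}
            self.auto_fn = auto_fn

        def register(self, name, obj):
            self.impls[name] = obj

        def load(self, name):
            if name in self.impls:
                return self.impls[name]
            if self.auto_fn is not None:
                obj = self.auto_fn(name)     # fallback: try to resolve lazily
                if obj is not None:
                    self.impls[name] = obj   # cache for next time
                    return obj
            raise KeyError("no such plugin: %r" % name)

    registry = PluginRegistry(auto_fn=lambda name: dict if name == 'dict' else None)
    print(registry.load('dict'))   # resolved through auto_fn and cached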

lib/sqlalchemy/dialects/firebird/__init__.py

     'TEXT', 'NUMERIC', 'FLOAT', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB',
     'dialect'
 )
-
-

lib/sqlalchemy/dialects/firebird/base.py

 
 """
 
-import datetime, re
+import datetime
 
 from sqlalchemy import schema as sa_schema
 from sqlalchemy import exc, types as sqltypes, sql, util
         self.charset = charset
         super(_StringType, self).__init__(**kw)
 
+
 class VARCHAR(_StringType, sqltypes.VARCHAR):
     """Firebird VARCHAR type"""
     __visit_name__ = 'VARCHAR'
     def __init__(self, length=None, **kwargs):
         super(VARCHAR, self).__init__(length=length, **kwargs)
 
+
 class CHAR(_StringType, sqltypes.CHAR):
     """Firebird CHAR type"""
     __visit_name__ = 'CHAR'
         return self._extend_string(type_, basic)
 
 
-
 class FBCompiler(sql.compiler.SQLCompiler):
     """Firebird specific idiosyncrasies"""
 
         # get primary key fields
         c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
         pkfields = [self.normalize_name(r['fname']) for r in c.fetchall()]
-        return {'constrained_columns':pkfields, 'name':None}
+        return {'constrained_columns': pkfields, 'name': None}
 
     @reflection.cache
     def get_column_sequence(self, connection,
                     # Redundant
                     defvalue = None
             col_d = {
-                'name' : name,
-                'type' : coltype,
-                'nullable' :  not bool(row['null_flag']),
-                'default' : defvalue,
-                'autoincrement':defvalue is None
+                'name': name,
+                'type': coltype,
+                'nullable': not bool(row['null_flag']),
+                'default': defvalue,
+                'autoincrement': defvalue is None
             }
 
             if orig_colname.lower() == orig_colname:
 
             # if the PK is a single field, try to see if its linked to
             # a sequence thru a trigger
-            if len(pkey_cols)==1 and name==pkey_cols[0]:
+            if len(pkey_cols) == 1 and name == pkey_cols[0]:
                 seq_d = self.get_column_sequence(connection, tablename, name)
                 if seq_d is not None:
                     col_d['sequence'] = seq_d
         tablename = self.denormalize_name(table_name)
 
         c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
-        fks = util.defaultdict(lambda:{
-            'name' : None,
-            'constrained_columns' : [],
-            'referred_schema' : None,
-            'referred_table' : None,
-            'referred_columns' : []
+        fks = util.defaultdict(lambda: {
+            'name': None,
+            'constrained_columns': [],
+            'referred_schema': None,
+            'referred_table': None,
+            'referred_columns': []
         })
 
         for row in c:

lib/sqlalchemy/dialects/firebird/fdb.py

 from .kinterbasdb import FBDialect_kinterbasdb
 from ... import util
 
+
 class FBDialect_fdb(FBDialect_kinterbasdb):
 
     @classmethod
 
         return self._parse_version_info(version)
 
-dialect = FBDialect_fdb
+dialect = FBDialect_fdb

lib/sqlalchemy/dialects/firebird/kinterbasdb.py

                 return value
         return process
 
+
 class FBExecutionContext_kinterbasdb(FBExecutionContext):
     @property
     def rowcount(self):
         else:
             return -1
 
+
 class FBDialect_kinterbasdb(FBDialect):
     driver = 'kinterbasdb'
     supports_sane_rowcount = False
     colspecs = util.update_copy(
         FBDialect.colspecs,
         {
-            sqltypes.Numeric:_FBNumeric_kinterbasdb,
+            sqltypes.Numeric: _FBNumeric_kinterbasdb,
         }
 
     )

lib/sqlalchemy/dialects/informix/__init__.py

 
 from sqlalchemy.dialects.informix import base, informixdb
 
-base.dialect = informixdb.dialect
+base.dialect = informixdb.dialect

lib/sqlalchemy/dialects/informix/base.py

     "xadatasource", "xid", "xload", "xunload", "year"
     ])
 
+
 class InfoDateTime(sqltypes.DateTime):
+
     def bind_processor(self, dialect):
         def process(value):
             if value is not None:
             return value
         return process
 
+
 class InfoTime(sqltypes.Time):
+
     def bind_processor(self, dialect):
         def process(value):
             if value is not None:
         return process
 
 colspecs = {
-    sqltypes.DateTime : InfoDateTime,
+    sqltypes.DateTime: InfoDateTime,
     sqltypes.TIMESTAMP: InfoDateTime,
     sqltypes.Time: InfoTime,
 }
 
 
 ischema_names = {
-    0   : sqltypes.CHAR,       # CHAR
-    1   : sqltypes.SMALLINT, # SMALLINT
-    2   : sqltypes.INTEGER,      # INT
-    3   : sqltypes.FLOAT,      # Float
-    3   : sqltypes.Float,      # SmallFloat
-    5   : sqltypes.DECIMAL,      # DECIMAL
-    6   : sqltypes.Integer,      # Serial
-    7   : sqltypes.DATE,         # DATE
-    8   : sqltypes.Numeric,      # MONEY
-    10  : sqltypes.DATETIME,     # DATETIME
-    11  : sqltypes.LargeBinary,       # BYTE
-    12  : sqltypes.TEXT,         # TEXT
-    13  : sqltypes.VARCHAR,       # VARCHAR
-    15  : sqltypes.NCHAR,       # NCHAR
-    16  : sqltypes.NVARCHAR,       # NVARCHAR
-    17  : sqltypes.Integer,      # INT8
-    18  : sqltypes.Integer,      # Serial8
-    43  : sqltypes.String,       # LVARCHAR
-    -1  : sqltypes.BLOB,       # BLOB
-    -1  : sqltypes.CLOB,         # CLOB
+    0: sqltypes.CHAR,           # CHAR
+    1: sqltypes.SMALLINT,       # SMALLINT
+    2: sqltypes.INTEGER,        # INT
+    3: sqltypes.FLOAT,          # Float
+    3: sqltypes.Float,          # SmallFloat
+    5: sqltypes.DECIMAL,        # DECIMAL
+    6: sqltypes.Integer,        # Serial
+    7: sqltypes.DATE,           # DATE
+    8: sqltypes.Numeric,        # MONEY
+    10: sqltypes.DATETIME,      # DATETIME
+    11: sqltypes.LargeBinary,   # BYTE
+    12: sqltypes.TEXT,          # TEXT
+    13: sqltypes.VARCHAR,       # VARCHAR
+    15: sqltypes.NCHAR,         # NCHAR
+    16: sqltypes.NVARCHAR,      # NVARCHAR
+    17: sqltypes.Integer,       # INT8
+    18: sqltypes.Integer,       # Serial8
+    43: sqltypes.String,        # LVARCHAR
+    -1: sqltypes.BLOB,          # BLOB
+    -1: sqltypes.CLOB,          # CLOB
 }
 
 
     def visit_boolean(self, type_):
         return "SMALLINT"
 
+
 class InfoSQLCompiler(compiler.SQLCompiler):
+
     def default_from(self):
         return " from systables where tabname = 'systables' "
 
             text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
         return text
 
+
 class InformixIdentifierPreparer(compiler.IdentifierPreparer):
 
     reserved_words = RESERVED_WORDS
 class InformixDialect(default.DefaultDialect):
     name = 'informix'
 
-    max_identifier_length = 128 # adjusts at runtime based on server version
+    max_identifier_length = 128  # adjusts at runtime based on server version
 
     type_compiler = InfoTypeCompiler
     statement_compiler = InfoSQLCompiler
             if coltype not in (0, 13) and default:
                 default = default.split()[-1]
 
-            if coltype == 6: # Serial, mark as autoincrement
+            if coltype == 6:  # Serial, mark as autoincrement
                 autoincrement = True
 
-            if coltype == 0 or coltype == 13: # char, varchar
+            if coltype == 0 or coltype == 13:  # char, varchar
                 coltype = ischema_names[coltype](collength)
                 if default:
                     default = "'%s'" % default
-            elif coltype == 5: # decimal
+            elif coltype == 5:  # decimal
                 precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
                 if scale == 255:
                     scale = 0
              t8.idxname
              and t7.tabid = t5.ptabid""", table_name, schema_sel)
 
-
         def fkey_rec():
             return {
-                 'name' : None,
-                 'constrained_columns' : [],
-                 'referred_schema' : None,
-                 'referred_table' : None,
-                 'referred_columns' : []
+                 'name': None,
+                 'constrained_columns': [],
+                 'referred_schema': None,
+                 'referred_table': None,
+                 'referred_columns': []
              }
 
         fkeys = util.defaultdict(fkey_rec)
         colpositions = set()
 
         for row in data:
-            colpos = set([getattr(row, 'part%d' % x) for x in range(1,16)])
+            colpos = set([getattr(row, 'part%d' % x) for x in range(1, 16)])
             colpositions |= colpos
 
         if not len(colpositions):
-            return {'constrained_columns':[], 'name':None}
+            return {'constrained_columns': [], 'name': None}
 
         # Select the column names using the columnpositions
         # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
-        place_holder = ','.join('?'*len(colpositions))
+        place_holder = ','.join('?' * len(colpositions))
         c = connection.execute(
             """select t1.colname
             from syscolumns as t1, systables as t2
             table_name, *colpositions
         ).fetchall()
 
-        cols = reduce(lambda x,y: list(x)+list(y), c, [])
-        return {'constrained_columns':cols, 'name':None}
+        cols = reduce(lambda x, y: list(x) + list(y), c, [])
+        return {'constrained_columns': cols, 'name': None}
 
     @reflection.cache
     def get_indexes(self, connection, table_name, schema, **kw):
 
         indexes = []
         for row in c.fetchall():
-            colnames = [getattr(row, 'part%d' % x) for x in range(1,16)]
+            colnames = [getattr(row, 'part%d' % x) for x in range(1, 16)]
             colnames = [x for x in colnames if x]
-            place_holder = ','.join('?'*len(colnames))
+            place_holder = ','.join('?' * len(colnames))
             c = connection.execute(
                 """select t1.colname
                 from syscolumns as t1, systables as t2
                 t1.colno in (%s)""" % place_holder,
                 table_name, *colnames
             ).fetchall()
-            c = reduce(lambda x,y: list(x)+list(y), c, [])
+            c = reduce(lambda x, y: list(x) + list(y), c, [])
             indexes.append({
                 'name': row.idxname,
                 'unique': row.idxtype.lower() == 'u',

lib/sqlalchemy/dialects/informix/informixdb.py

 
 VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
 
+
 class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
+
     def post_exec(self):
         if self.isinsert:
             self._lastrowid = self.cursor.sqlerrd[1]

lib/sqlalchemy/dialects/mssql/__init__.py

     'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME',
     'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP',
     'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect'
-)
+)

lib/sqlalchemy/dialects/mssql/adodbapi.py

 from sqlalchemy.dialects.mssql.base import MSDateTime, MSDialect
 import sys
 
+
 class MSDateTime_adodbapi(MSDateTime):
     def result_processor(self, dialect, coltype):
         def process(value):
     colspecs = util.update_copy(
         MSDialect.colspecs,
         {
-            sqltypes.DateTime:MSDateTime_adodbapi
+            sqltypes.DateTime: MSDateTime_adodbapi
         }
     )
 
 
         connectors = ["Provider=SQLOLEDB"]
         if 'port' in keys:
-            connectors.append ("Data Source=%s, %s" %
+            connectors.append("Data Source=%s, %s" %
                                 (keys.get("host"), keys.get("port")))
         else:
-            connectors.append ("Data Source=%s" % keys.get("host"))
-        connectors.append ("Initial Catalog=%s" % keys.get("database"))
+            connectors.append("Data Source=%s" % keys.get("host"))
+        connectors.append("Initial Catalog=%s" % keys.get("database"))
         user = keys.get("user")
         if user:
             connectors.append("User Id=%s" % user)
             connectors.append("Password=%s" % keys.get("password", ""))
         else:
             connectors.append("Integrated Security=SSPI")
-        return [[";".join (connectors)], {}]
+        return [[";".join(connectors)], {}]
 
     def is_disconnect(self, e, connection, cursor):
         return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \

lib/sqlalchemy/dialects/mssql/base.py

      'writetext',
     ])
 
+
 class REAL(sqltypes.REAL):
     __visit_name__ = 'REAL'
 
         kw['precision'] = 24
         super(REAL, self).__init__(**kw)
 
+
 class TINYINT(sqltypes.Integer):
     __visit_name__ = 'TINYINT'
 
         return process
 
     _reg = re.compile(r"(\d+)-(\d+)-(\d+)")
+
     def result_processor(self, dialect, coltype):
         def process(value):
             if isinstance(value, datetime.datetime):
                 return value
         return process
 
+
 class TIME(sqltypes.TIME):
     def __init__(self, precision=None, **kwargs):
         self.precision = precision
         return process
 
     _reg = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d{0,6}))?")
+
     def result_processor(self, dialect, coltype):
         def process(value):
             if isinstance(value, datetime.datetime):
         return process
 _MSTime = TIME
 
+
 class _DateTimeBase(object):
     def bind_processor(self, dialect):
         def process(value):
                 return value
         return process
 
+
 class _MSDateTime(_DateTimeBase, sqltypes.DateTime):
     pass
 
+
 class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime):
     __visit_name__ = 'SMALLDATETIME'
 
+
 class DATETIME2(_DateTimeBase, sqltypes.DateTime):
     __visit_name__ = 'DATETIME2'
 
     def __init__(self, precision=None, **kwargs):
         self.precision = precision
 
+
 class _StringType(object):
     """Base for MSSQL string types."""
 
     def __init__(self, collation=None):
         self.collation = collation
 
+
 class TEXT(_StringType, sqltypes.TEXT):
     """MSSQL TEXT type, for variable-length text up to 2^31 characters."""
 
         _StringType.__init__(self, collation)
         sqltypes.Text.__init__(self, length, **kw)
 
+
 class NTEXT(_StringType, sqltypes.UnicodeText):
     """MSSQL NTEXT type, for variable-length unicode text up to 2^30
     characters."""
         _StringType.__init__(self, collation)
         sqltypes.VARCHAR.__init__(self, length, **kw)
 
+
 class NVARCHAR(_StringType, sqltypes.NVARCHAR):
     """MSSQL NVARCHAR type.
 
         _StringType.__init__(self, collation)
         sqltypes.NVARCHAR.__init__(self, length, **kw)
 
+
 class CHAR(_StringType, sqltypes.CHAR):
     """MSSQL CHAR type, for fixed-length non-Unicode data with a maximum
     of 8,000 characters."""
         _StringType.__init__(self, collation)
         sqltypes.CHAR.__init__(self, length, **kw)
 
+
 class NCHAR(_StringType, sqltypes.NCHAR):
     """MSSQL NCHAR type.
 
         _StringType.__init__(self, collation)
         sqltypes.NCHAR.__init__(self, length, **kw)
 
+
 class IMAGE(sqltypes.LargeBinary):
     __visit_name__ = 'IMAGE'
 
+
 class BIT(sqltypes.TypeEngine):
     __visit_name__ = 'BIT'
 
 class MONEY(sqltypes.TypeEngine):
     __visit_name__ = 'MONEY'
 
+
 class SMALLMONEY(sqltypes.TypeEngine):
     __visit_name__ = 'SMALLMONEY'
 
+
 class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
     __visit_name__ = "UNIQUEIDENTIFIER"
 
+
 class SQL_VARIANT(sqltypes.TypeEngine):
     __visit_name__ = 'SQL_VARIANT'
 
 MSVariant = SQL_VARIANT
 
 ischema_names = {
-    'int' : INTEGER,
+    'int': INTEGER,
     'bigint': BIGINT,
-    'smallint' : SMALLINT,
-    'tinyint' : TINYINT,
-    'varchar' : VARCHAR,
-    'nvarchar' : NVARCHAR,
-    'char' : CHAR,
-    'nchar' : NCHAR,
-    'text' : TEXT,
-    'ntext' : NTEXT,
-    'decimal' : DECIMAL,
-    'numeric' : NUMERIC,
-    'float' : FLOAT,
-    'datetime' : DATETIME,
-    'datetime2' : DATETIME2,
-    'datetimeoffset' : DATETIMEOFFSET,
+    'smallint': SMALLINT,
+    'tinyint': TINYINT,
+    'varchar': VARCHAR,
+    'nvarchar': NVARCHAR,
+    'char': CHAR,
+    'nchar': NCHAR,
+    'text': TEXT,
+    'ntext': NTEXT,
+    'decimal': DECIMAL,
+    'numeric': NUMERIC,
+    'float': FLOAT,
+    'datetime': DATETIME,
+    'datetime2': DATETIME2,
+    'datetimeoffset': DATETIMEOFFSET,
     'date': DATE,
     'time': TIME,
-    'smalldatetime' : SMALLDATETIME,
-    'binary' : BINARY,
-    'varbinary' : VARBINARY,
+    'smalldatetime': SMALLDATETIME,
+    'binary': BINARY,
+    'varbinary': VARBINARY,
     'bit': BIT,
-    'real' : REAL,
-    'image' : IMAGE,
+    'real': REAL,
+    'image': IMAGE,
     'timestamp': TIMESTAMP,
     'money': MONEY,
     'smallmoney': SMALLMONEY,
         return self._extend("TEXT", type_)
 
     def visit_VARCHAR(self, type_):
-        return self._extend("VARCHAR", type_,
-                    length = type_.length or 'max')
+        return self._extend("VARCHAR", type_, length=type_.length or 'max')
 
     def visit_CHAR(self, type_):
         return self._extend("CHAR", type_)
         return self._extend("NCHAR", type_)
 
     def visit_NVARCHAR(self, type_):
-        return self._extend("NVARCHAR", type_,
-                    length = type_.length or 'max')
+        return self._extend("NVARCHAR", type_, length=type_.length or 'max')
 
     def visit_date(self, type_):
         if self.dialect.server_version_info < MS_2008_VERSION:
     def visit_SQL_VARIANT(self, type_):
         return 'SQL_VARIANT'
 
+
 class MSExecutionContext(default.DefaultExecutionContext):
     _enable_identity_insert = False
     _select_lastrowid = False
         else:
             return engine.ResultProxy(self)
 
+
 class MSSQLCompiler(compiler.SQLCompiler):
     returning_precedes_values = True
 
                                     fromhints=from_hints, **kw)
                     for t in [from_table] + extra_froms)
 
+
 class MSSQLStrictCompiler(MSSQLCompiler):
     """A subclass of MSSQLCompiler which disables the usage of bind
     parameters where not allowed natively by MS-SQL.
             return super(MSSQLStrictCompiler, self).\
                                 render_literal_value(value, type_)
 
+
 class MSDDLCompiler(compiler.DDLCompiler):
     def get_column_specification(self, column, **kwargs):
         colspec = (self.preparer.format_column(column) + " "
         result = '.'.join([self.quote(x, force) for x in schema.split('.')])
         return result
 
+
 def _db_plus_owner_listing(fn):
     def wrap(dialect, connection, schema=None, **kw):
         dbname, owner = _owner_plus_db(dialect, schema)
                             dbname, owner, schema, **kw)
     return update_wrapper(wrap, fn)
 
+
 def _db_plus_owner(fn):
     def wrap(dialect, connection, tablename, schema=None, **kw):
         dbname, owner = _owner_plus_db(dialect, schema)
                             tablename, dbname, owner, schema, **kw)
     return update_wrapper(wrap, fn)
 
+
 def _switch_db(dbname, connection, fn, *arg, **kw):
     if dbname:
         current_db = connection.scalar("select db_name()")
         if dbname:
             connection.execute("use %s" % current_db)
 
+
 def _owner_plus_db(dialect, schema):
     if not schema:
         return None, dialect.default_schema_name
     else:
         return None, schema
 
+
 class MSDialect(default.DefaultDialect):
     name = 'mssql'
     supports_default_values = True
     schema_name = "dbo"
 
     colspecs = {
-        sqltypes.DateTime : _MSDateTime,
-        sqltypes.Date : _MSDate,
-        sqltypes.Time : TIME,
+        sqltypes.DateTime: _MSDateTime,
+        sqltypes.Date: _MSDate,
+        sqltypes.Time: TIME,
     }
 
     ischema_names = ischema_names
                 "behaviors may not function properly.   If using ODBC "
                 "with FreeTDS, ensure server version 7.0 or 8.0, not 4.2, "
                 "is configured in the FreeTDS configuration." %
-                ".".join(str(x) for x in self.server_version_info) )
+                ".".join(str(x) for x in self.server_version_info))
         if self.server_version_info >= MS_2005_VERSION and \
                     'implicit_returning' not in self.__dict__:
             self.implicit_returning = True
                     sql.bindparam('schname', owner,
                                     sqltypes.String(convert_unicode=True))
                 ],
-                typemap = {
+                typemap={
                     'name': sqltypes.Unicode()
                 }
             )
                             sql.bindparam('schname', owner,
                                     sqltypes.String(convert_unicode=True))
                         ],
-                        typemap = {
-                            'name': sqltypes.Unicode()
-                        }
+                        typemap={'name': sqltypes.Unicode()}
                         ),
             )
         for row in rp:
 
                 coltype = coltype(**kwargs)
             cdict = {
-                'name' : name,
-                'type' : coltype,
-                'nullable' : nullable,
-                'default' : default,
-                'autoincrement':False,
+                'name': name,
+                'type': coltype,
+                'nullable': nullable,
+                'default': default,
+                'autoincrement': False,
             }
             cols.append(cdict)
         # autoincrement and identity
                                                 RR.c.unique_constraint_name,
                                 C.c.ordinal_position == R.c.ordinal_position
                                 ),
-                       order_by= [
-                                    RR.c.constraint_name,
-                                    R.c.ordinal_position])
-
+                       order_by=[RR.c.constraint_name, R.c.ordinal_position]
+        )
 
         # group rows by constraint ID, to handle multi-column FKs
         fkeys = []
             remote_cols.append(rcol)
 
         return fkeys.values()
-
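
Among the mssql/base.py hunks above are the small decorators (_db_plus_owner and friends) that split an optional "database.owner" schema string and hand the pieces to the wrapped reflection method, with functools.update_wrapper preserving the wrapped function's metadata. A toy version of that pattern follows, with invented names and no real dialect involved.

    from functools import update_wrapper

    def _split_schema(schema, default_owner='dbo'):
        # "database.owner" -> (database, owner); a bare "owner" keeps dbname None
        if not schema:
            return None, default_owner
        if '.' in schema:
            dbname, owner = schema.split('.', 1)
            return dbname, owner
        return None, schema

    def db_plus_owner(fn):
        def wrap(connection, tablename, schema=None, **kw):
            dbname, owner = _split_schema(schema)
            return fn(connection, tablename, dbname, owner, **kw)
        return update_wrapper(wrap, fn)

    @db_plus_owner
    def get_columns(connection, tablename, dbname, owner, **kw):
        """Pretend reflection routine."""
        return (tablename, dbname, owner)

    print(get_columns(None, 'invoices', schema='reporting.dbo'))
    # ('invoices', 'reporting', 'dbo')
    print(get_columns.__name__)   # 'get_columns', kept by update_wrapper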

lib/sqlalchemy/dialects/mssql/information_schema.py

 
 ischema = MetaData()
 
+
 class CoerceUnicode(TypeDecorator):
     impl = Unicode
 
     Column("CHECK_OPTION", String, key="check_option"),
     Column("IS_UPDATABLE", String, key="is_updatable"),
     schema="INFORMATION_SCHEMA")
-

lib/sqlalchemy/dialects/mssql/mxodbc.py

     """Include pyodbc's numeric processor.
     """
 
+
 class _MSDate_mxodbc(_MSDate):
     def bind_processor(self, dialect):
         def process(value):
                 return None
         return process
 
+
 class _MSTime_mxodbc(_MSTime):
     def bind_processor(self, dialect):
         def process(value):
                 return None
         return process
 
+
 class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc):
     """
     The pyodbc execution context is useful for enabling
     #       is really only being used in cases where OUTPUT
     #       won't work.
 
+
 class MSDialect_mxodbc(MxODBCConnector, MSDialect):
 
     # this is only needed if "native ODBC" mode is used,
     _need_decimal_fix = True
 
     colspecs = {
-        sqltypes.Numeric : _MSNumeric_mxodbc,
-        sqltypes.DateTime : _MSDateTime,
-        sqltypes.Date : _MSDate_mxodbc,
-        sqltypes.Time : _MSTime_mxodbc,
+        sqltypes.Numeric: _MSNumeric_mxodbc,
+        sqltypes.DateTime: _MSDateTime,
+        sqltypes.Date: _MSDate_mxodbc,
+        sqltypes.Time: _MSTime_mxodbc,
     }
 
-
     def __init__(self, description_encoding=None, **params):
         super(MSDialect_mxodbc, self).__init__(**params)
         self.description_encoding = description_encoding
 
 dialect = MSDialect_mxodbc
-

lib/sqlalchemy/dialects/mssql/pymssql.py

 from ... import types as sqltypes, util, processors
 import re
 
+
 class _MSNumeric_pymssql(sqltypes.Numeric):
     def result_processor(self, dialect, type_):
         if not self.asdecimal:
         else:
             return sqltypes.Numeric.result_processor(self, dialect, type_)
 
+
 class MSDialect_pymssql(MSDialect):
     supports_sane_rowcount = False
     driver = 'pymssql'
     colspecs = util.update_copy(
         MSDialect.colspecs,
         {
-            sqltypes.Numeric:_MSNumeric_pymssql,
-            sqltypes.Float:sqltypes.Float,
+            sqltypes.Numeric: _MSNumeric_pymssql,
+            sqltypes.Float: sqltypes.Float,
         }
     )
+
     @classmethod
     def dbapi(cls):
         module = __import__('pymssql')

lib/sqlalchemy/dialects/mssql/pyodbc.py

 from ... import types as sqltypes, util
 from ...util.compat import decimal
 
+
 class _MSNumeric_pyodbc(sqltypes.Numeric):
     """Turns Decimals with adjusted() < 0 or > 7 into strings.
 
             result = "%s%s%s" % (
                     (value < 0 and '-' or ''),
                     "".join([str(s) for s in _int]),
-                    "0" * (value.adjusted() - (len(_int)-1)))
+                    "0" * (value.adjusted() - (len(_int) - 1)))
         else:
             if (len(_int) - 1) > value.adjusted():
                 result = "%s%s.%s" % (
     colspecs = util.update_copy(
         MSDialect.colspecs,
         {
-            sqltypes.Numeric:_MSNumeric_pyodbc
+            sqltypes.Numeric: _MSNumeric_pyodbc
         }
     )
 

lib/sqlalchemy/dialects/mssql/zxjdbc.py

 from .base import MSDialect, MSExecutionContext
 from ... import engine
 
+
 class MSExecutionContext_zxjdbc(MSExecutionContext):
 
     _embedded_scope_identity = False

lib/sqlalchemy/dialects/mysql/base.py

      'union', 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use',
      'using', 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary',
      'varchar', 'varcharacter', 'varying', 'when', 'where', 'while', 'with',
-     'write', 'x509', 'xor', 'year_month', 'zerofill', # 5.0
-     'columns', 'fields', 'privileges', 'soname', 'tables', # 4.1
+     'write', 'x509', 'xor', 'year_month', 'zerofill',  # 5.0
+     'columns', 'fields', 'privileges', 'soname', 'tables',  # 4.1
      'accessible', 'linear', 'master_ssl_verify_server_cert', 'range',
-     'read_only', 'read_write', # 5.1
+     'read_only', 'read_write',  # 5.1
      ])
 
 AUTOCOMMIT_RE = re.compile(
         self.zerofill = zerofill
         super(_NumericType, self).__init__(**kw)
 
+
 class _FloatType(_NumericType, sqltypes.Float):
     def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
         if isinstance(self, (REAL, DOUBLE)) and \
         super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw)
         self.scale = scale
 
+
 class _IntegerType(_NumericType, sqltypes.Integer):
     def __init__(self, display_width=None, **kw):
         self.display_width = display_width
         super(_IntegerType, self).__init__(**kw)
 
+
 class _StringType(sqltypes.String):
     """Base for MySQL string types."""
 
         super(DOUBLE, self).__init__(precision=precision, scale=scale,
                                      asdecimal=asdecimal, **kw)
 
+
 class REAL(_FloatType, sqltypes.REAL):
     """MySQL REAL type."""
 
         super(REAL, self).__init__(precision=precision, scale=scale,
                                    asdecimal=asdecimal, **kw)
 
+
 class FLOAT(_FloatType, sqltypes.FLOAT):
     """MySQL FLOAT type."""
 
     def bind_processor(self, dialect):
         return None
 
+
 class INTEGER(_IntegerType, sqltypes.INTEGER):
     """MySQL INTEGER type."""
 
         """
         super(INTEGER, self).__init__(display_width=display_width, **kw)
 
+
 class BIGINT(_IntegerType, sqltypes.BIGINT):
     """MySQL BIGINTEGER type."""
 
         """
         super(BIGINT, self).__init__(display_width=display_width, **kw)
 
+
 class MEDIUMINT(_IntegerType):
     """MySQL MEDIUMINTEGER type."""
 
         """
         super(MEDIUMINT, self).__init__(display_width=display_width, **kw)
 
+
 class TINYINT(_IntegerType):
     """MySQL TINYINT type."""
 
         """
         super(TINYINT, self).__init__(display_width=display_width, **kw)
 
+
 class SMALLINT(_IntegerType, sqltypes.SMALLINT):
     """MySQL SMALLINTEGER type."""
 
         """
         super(SMALLINT, self).__init__(display_width=display_width, **kw)
 
+
 class BIT(sqltypes.TypeEngine):
     """MySQL BIT type.
 
             return value
         return process
 
+
 class TIME(sqltypes.TIME):
     """MySQL TIME type.
 
 
     def result_processor(self, dialect, coltype):
         time = datetime.time
+
         def process(value):
             # convert from a timedelta value
             if value is not None:
                 return None
         return process
 
+
 class TIMESTAMP(sqltypes.TIMESTAMP):
     """MySQL TIMESTAMP type."""
     __visit_name__ = 'TIMESTAMP'
 
+
 class YEAR(sqltypes.TypeEngine):
     """MySQL YEAR type, for single byte storage of years 1901-2155."""
 
     def __init__(self, display_width=None):
         self.display_width = display_width
 
+
 class TEXT(_StringType, sqltypes.TEXT):
     """MySQL TEXT type, for text up to 2^16 characters."""
 
         """
         super(TEXT, self).__init__(length=length, **kw)
 
+
 class TINYTEXT(_StringType):
     """MySQL TINYTEXT type, for text up to 2^8 characters."""
 
         """
         super(TINYTEXT, self).__init__(**kwargs)
 
+
 class MEDIUMTEXT(_StringType):
     """MySQL MEDIUMTEXT type, for text up to 2^24 characters."""
 
         """
         super(MEDIUMTEXT, self).__init__(**kwargs)
 
+
 class LONGTEXT(_StringType):
     """MySQL LONGTEXT type, for text up to 2^32 characters."""
 
         """
         super(VARCHAR, self).__init__(length=length, **kwargs)
 
+
 class CHAR(_StringType, sqltypes.CHAR):
     """MySQL CHAR type, for fixed-length character data."""
 
         """
         super(CHAR, self).__init__(length=length, **kwargs)
 
+
 class NVARCHAR(_StringType, sqltypes.NVARCHAR):
     """MySQL NVARCHAR type.
 
         super(NCHAR, self).__init__(length=length, **kwargs)
 
 
-
-
 class TINYBLOB(sqltypes._Binary):
     """MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
 
     __visit_name__ = 'TINYBLOB'
 
+
 class MEDIUMBLOB(sqltypes._Binary):
     """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""
 
     __visit_name__ = 'MEDIUMBLOB'
 
+
 class LONGBLOB(sqltypes._Binary):
     """MySQL LONGBLOB type, for binary data up to 2^32 bytes."""
 
     __visit_name__ = 'LONGBLOB'
 
+
 class ENUM(sqltypes.Enum, _StringType):
     """MySQL ENUM type."""
 
 
     def bind_processor(self, dialect):
         super_convert = super(ENUM, self).bind_processor(dialect)
+
         def process(value):
             if self.strict and value is not None and value not in self.enums:
                 raise exc.InvalidRequestError('"%s" not a valid value for '
         kw['strict'] = self.strict
         return sqltypes.Enum.adapt(self, impltype, **kw)
 
+
 class SET(_StringType):
     """MySQL SET type."""
 
 
     def bind_processor(self, dialect):
         super_convert = super(SET, self).bind_processor(dialect)
+
         def process(value):
             if value is None or isinstance(value, (int, long, basestring)):
                 pass
     'year': YEAR,
 }
 
+
 class MySQLExecutionContext(default.DefaultExecutionContext):
 
     def should_autocommit_text(self, statement):
         return AUTOCOMMIT_RE.match(statement)
 
+
 class MySQLCompiler(compiler.SQLCompiler):
 
     render_table_with_column_in_update_from = True
     """Overridden from base SQLCompiler value"""
 
     extract_map = compiler.SQLCompiler.extract_map.copy()
-    extract_map.update ({
-        'milliseconds': 'millisecond',
-    })
+    extract_map.update({'milliseconds': 'millisecond'})
 
     def visit_random_func(self, fn, **kw):
         return "rand%s" % self.function_argspec(fn)
 
         return constraint_string
 
-
     def get_column_specification(self, column, **kw):
         """Builds column DDL."""
 
             table_opts.append(joiner.join((opt, arg)))
         return ' '.join(table_opts)
 
-
     def visit_create_index(self, create):
         index = create.element
         preparer = self.preparer
                     (self.preparer.format_table(constraint.table),
                     qual, const)
 
+
 class MySQLTypeCompiler(compiler.GenericTypeCompiler):
     def _extend_numeric(self, type_, spec):
         "Extend a numeric-type declaration with MySQL specific extensions."
         else:
             return self._extend_numeric(type_,
                             "NUMERIC(%(precision)s, %(scale)s)" %
-                            {'precision': type_.precision, 'scale' : type_.scale})
+                            {'precision': type_.precision, 'scale': type_.scale})
 
     def visit_DECIMAL(self, type_):
         if type_.precision is None:
         else:
             return self._extend_numeric(type_,
                             "DECIMAL(%(precision)s, %(scale)s)" %
-                            {'precision': type_.precision, 'scale' : type_.scale})
+                            {'precision': type_.precision, 'scale': type_.scale})
 
     def visit_DOUBLE(self, type_):
         if type_.precision is not None and type_.scale is not None:
             return self._extend_numeric(type_, "DOUBLE(%(precision)s, %(scale)s)" %
                                 {'precision': type_.precision,
-                                 'scale' : type_.scale})
+                                 'scale': type_.scale})
         else:
             return self._extend_numeric(type_, 'DOUBLE')
 
         if type_.precision is not None and type_.scale is not None:
             return self._extend_numeric(type_, "REAL(%(precision)s, %(scale)s)" %
                                 {'precision': type_.precision,
-                                 'scale' : type_.scale})
+                                 'scale': type_.scale})
         else:
             return self._extend_numeric(type_, 'REAL')
 
 
         return tuple([self.quote_identifier(i) for i in ids if i is not None])
 
+
 class MySQLDialect(default.DefaultDialect):
     """Details of the MySQL dialect.  Not used directly in application code."""
 
     def _get_default_schema_name(self, connection):
         return connection.execute('SELECT DATABASE()').scalar()
 
-
     def has_table(self, connection, table_name, schema=None):
         # SHOW TABLE STATUS LIKE and SHOW TABLES LIKE do not function properly
         # on macosx (and maybe win?) with multibyte table names.
         # full_name = self.identifier_preparer.format_table(table,
         #                                                   use_schema=True)
 
-
         full_name = '.'.join(self.identifier_preparer._quote_free_identifiers(
             schema, table_name))
 
             if key['type'] == 'PRIMARY':
                 # There can be only one.
                 cols = [s[0] for s in key['columns']]
-                return {'constrained_columns':cols, 'name':None}
-        return {'constrained_columns':[], 'name':None}
+                return {'constrained_columns': cols, 'name': None}
+        return {'constrained_columns': [], 'name': None}
 
     @reflection.cache
     def get_foreign_keys(self, connection, table_name, schema=None, **kw):
                     con_kw[opt] = spec[opt]
 
             fkey_d = {
-                'name' : spec['name'],
-                'constrained_columns' : loc_names,
-                'referred_schema' : ref_schema,
-                'referred_table' : ref_name,
-                'referred_columns' : ref_names,
-                'options' : con_kw
+                'name': spec['name'],
+                'constrained_columns': loc_names,
+                'referred_schema': ref_schema,
+                'referred_table': ref_name,
+                'referred_columns': ref_names,
+                'options': con_kw
             }
             fkeys.append(fkey_d)
         return fkeys
                 rp.close()
         return rows
 
+
 class ReflectedState(object):
     """Stores raw information about a SHOW CREATE TABLE statement."""
 
         self.keys = []
         self.constraints = []
 
+
 class MySQLTableDefinitionParser(object):
     """Parses the results of a SHOW CREATE TABLE statement."""
 
         # 123 or 123,456
         self._re_csv_int = _re_compile(r'\d+')
 
-
         # `colname` <type> [type opts]
         #  (NOT NULL | NULL)
         #   DEFAULT ('value' | CURRENT_TIMESTAMP...)
                  r"'(?P<val>(?:[^']|'')*?)'(?!')" %
                  (re.escape(directive), self._optional_equals))
         self._pr_options.append(
-            _pr_compile(regex, lambda v: v.replace("\\\\","\\").replace("''", "'")))
+            _pr_compile(regex, lambda v: v.replace("\\\\", "\\").replace("''", "'")))
 
     def _add_option_word(self, directive):
         regex = (r'(?P<directive>%s)%s'
 
     return (_re_compile(regex), cleanup)
 
+
 def _re_compile(regex):
     """Compile a string to regex, I and UNICODE."""
 
     return re.compile(regex, re.I | re.UNICODE)
-

lib/sqlalchemy/dialects/mysql/gaerdbms.py

         if code:
             return int(code)
 
-dialect = MySQLDialect_gaerdbms
+dialect = MySQLDialect_gaerdbms

lib/sqlalchemy/dialects/mysql/mysqlconnector.py

 
 from ... import util
 
+
 class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
 
     def get_lastrowid(self):
     def post_process_text(self, text):
         return text.replace('%', '%%')
 
+
 class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
 
     def _escape_identifier(self, value):
         value = value.replace(self.escape_quote, self.escape_to_quote)
         return value.replace("%", "%%")
 
+
 class _myconnpyBIT(BIT):
     def result_processor(self, dialect, coltype):
         """MySQL-connector already converts mysql bits, so."""
 
         return None
 
+
 class MySQLDialect_mysqlconnector(MySQLDialect):
     driver = 'mysqlconnector'
     supports_unicode_statements = True
 
     def is_disconnect(self, e, connection, cursor):
         errnos = (2006, 2013, 2014, 2045, 2055, 2048)
-        exceptions = (self.dbapi.OperationalError,self.dbapi.InterfaceError)
+        exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError)
         if isinstance(e, exceptions):
             return e.errno in errnos
         else:

lib/sqlalchemy/dialects/mysql/mysqldb.py

                         MySQLDBConnector
                     )
 
+
 class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext):
     pass
 
 class MySQLIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer, MySQLIdentifierPreparer):
     pass
 
+
 class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
     execution_ctx_cls = MySQLExecutionContext_mysqldb
     statement_compiler = MySQLCompiler_mysqldb

lib/sqlalchemy/dialects/mysql/oursql.py

 from ... import types as sqltypes, util
 
 
-
 class _oursqlBIT(BIT):
     def result_processor(self, dialect, coltype):
         """oursql already converts mysql bits, so."""
     def plain_query(self):
         return self.execution_options.get('_oursql_plain_query', False)
 
+
 class MySQLDialect_oursql(MySQLDialect):
     driver = 'oursql'
 # Py2K
     # Q: why didn't we need all these "plain_query" overrides earlier ?
     # am i on a newer/older version of OurSQL ?
     def has_table(self, connection, table_name, schema=None):
-        return MySQLDialect.has_table(self,
-                                        connection.connect().\
-                                            execution_options(_oursql_plain_query=True),
-                                        table_name, schema)
+        return MySQLDialect.has_table(
+          self,
+          connection.connect().execution_options(_oursql_plain_query=True),
+          table_name,
+          schema
+        )
 
     def get_table_options(self, connection, table_name, schema=None, **kw):
-        return MySQLDialect.get_table_options(self,
-                                            connection.connect().\
-                                                execution_options(_oursql_plain_query=True),
-                                            table_name,
-                                            schema = schema,
-                                            **kw
+        return MySQLDialect.get_table_options(
+            self,
+            connection.connect().execution_options(_oursql_plain_query=True),
+            table_name,
+            schema=schema,
+            **kw
         )
 
-
     def get_columns(self, connection, table_name, schema=None, **kw):
-        return MySQLDialect.get_columns(self,
-                                        connection.connect().\
-                                                    execution_options(_oursql_plain_query=True),
-                                        table_name,
-                                        schema=schema,
-                                        **kw
+        return MySQLDialect.get_columns(
+            self,
+            connection.connect().execution_options(_oursql_plain_query=True),
+            table_name,
+            schema=schema,
+            **kw
         )
 
     def get_view_names(self, connection, schema=None, **kw):
-        return MySQLDialect.get_view_names(self,
-                                            connection.connect().\
-                                                    execution_options(_oursql_plain_query=True),
-                                            schema=schema,
-                                            **kw
+        return MySQLDialect.get_view_names(
+            self,
+            connection.connect().execution_options(_oursql_plain_query=True),
+            schema=schema,
+            **kw
         )
 
     def get_table_names(self, connection, schema=None, **kw):
-        return MySQLDialect.get_table_names(self,
-                            connection.connect().\
-                                        execution_options(_oursql_plain_query=True),
-                            schema
+        return MySQLDialect.get_table_names(
+            self,
+            connection.connect().execution_options(_oursql_plain_query=True),
+            schema
         )
 
     def get_schema_names(self, connection, **kw):
-        return MySQLDialect.get_schema_names(self,
-                                    connection.connect().\
-                                                execution_options(_oursql_plain_query=True),
-                                    **kw
+        return MySQLDialect.get_schema_names(
+            self,
+            connection.connect().execution_options(_oursql_plain_query=True),
+            **kw
         )
 
     def initialize(self, connection):
         return MySQLDialect.initialize(
-                            self,
-                            connection.execution_options(_oursql_plain_query=True)
-                            )
+            self,
+            connection.execution_options(_oursql_plain_query=True)
+        )
 
     def _show_create_table(self, connection, table, charset=None,
                            full_name=None):
-        return MySQLDialect._show_create_table(self,
-                                connection.contextual_connect(close_with_result=True).
-                                execution_options(_oursql_plain_query=True),
-                                table, charset, full_name)
+        return MySQLDialect._show_create_table(
+            self,
+            connection.contextual_connect(close_with_result=True).
+            execution_options(_oursql_plain_query=True),
+            table, charset, full_name
+        )
 
     def is_disconnect(self, e, connection, cursor):
         if isinstance(e, self.dbapi.ProgrammingError):
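
The oursql hunks above all reformat the same delegation pattern: the subclass re-enters the MySQLDialect implementation explicitly, but hands it a connection tagged with the private _oursql_plain_query execution option, so the shared reflection logic runs unchanged while the driver layer can see the flag. A toy illustration of that shape, with stand-in classes rather than real SQLAlchemy objects:

    class FakeConnection(object):
        def __init__(self, **options):
            self.options = options

        def execution_options(self, **kw):
            # return a copy carrying the extra per-execution flags
            merged = dict(self.options, **kw)
            return FakeConnection(**merged)

    class BaseDialect(object):
        def has_table(self, connection, table_name):
            # the shared implementation only sees the (possibly tagged) connection
            return 'checking %s with options %r' % (
                table_name, sorted(connection.options))

    class OurSQLStyleDialect(BaseDialect):
        def has_table(self, connection, table_name):
            return BaseDialect.has_table(
                self,
                connection.execution_options(_oursql_plain_query=True),
                table_name)

    print(OurSQLStyleDialect().has_table(FakeConnection(), 'users'))
    # checking users with options ['_oursql_plain_query']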

lib/sqlalchemy/dialects/mysql/pymysql.py

 
 from .mysqldb import MySQLDialect_mysqldb
 
+
 class MySQLDialect_pymysql(MySQLDialect_mysqldb):
     driver = 'pymysql'
 
     description_encoding = None
+
     @classmethod
     def dbapi(cls):
         return __import__('pymysql')
 
-dialect = MySQLDialect_pymysql
+dialect = MySQLDialect_pymysql

lib/sqlalchemy/dialects/mysql/pyodbc.py

 from ... import util
 import re
 
+
 class MySQLExecutionContext_pyodbc(MySQLExecutionContext):
 
     def get_lastrowid(self):
         cursor.close()
         return lastrowid
 
+
 class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
     supports_unicode_statements = False
     execution_ctx_cls = MySQLExecutionContext_pyodbc

lib/sqlalchemy/dialects/mysql/zxjdbc.py

 from ...connectors.zxJDBC import ZxJDBCConnector
 from .base import BIT, MySQLDialect, MySQLExecutionContext
 
+
 class _ZxJDBCBit(BIT):
     def result_processor(self, dialect, coltype):
         """Converts boolean or byte arrays from MySQL Connector/J to longs."""
         if c:
             return int(c)
 
-    def _get_server_version_info(self,connection):
+    def _get_server_version_info(self, connection):
         dbapi_con = connection.connection
         version = []
         r = re.compile('[.\-]')

lib/sqlalchemy/dialects/oracle/base.py

 
 """
 
-import random, re
+import re
 
-from sqlalchemy import schema as sa_schema
-from sqlalchemy import util, sql, log
+from sqlalchemy import util, sql
 from sqlalchemy.engine import default, base, reflection
 from sqlalchemy.sql import compiler, visitors, expression
 from sqlalchemy.sql import operators as sql_operators, functions as sql_functions
 NO_ARG_FNS = set('UID CURRENT_DATE SYSDATE USER '
                 'CURRENT_TIME CURRENT_TIMESTAMP'.split())
 
+
 class RAW(sqltypes._Binary):
     __visit_name__ = 'RAW'
 OracleRaw = RAW
 
+
 class NCLOB(sqltypes.Text):
     __visit_name__ = 'NCLOB'
 
+
 class VARCHAR2(VARCHAR):
     __visit_name__ = 'VARCHAR2'
 
 NVARCHAR2 = NVARCHAR
 
+
 class NUMBER(sqltypes.Numeric, sqltypes.Integer):
     __visit_name__ = 'NUMBER'
 
 
 class DOUBLE_PRECISION(sqltypes.Numeric):
     __visit_name__ = 'DOUBLE_PRECISION'
+
     def __init__(self, precision=None, scale=None, asdecimal=None):
         if asdecimal is None:
             asdecimal = False
 
         super(DOUBLE_PRECISION, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal)
 
+
 class BFILE(sqltypes.LargeBinary):
     __visit_name__ = 'BFILE'
 
+
 class LONG(sqltypes.Text):
     __visit_name__ = 'LONG'
 
+
 class INTERVAL(sqltypes.TypeEngine):
     __visit_name__ = 'INTERVAL'
 
     def _type_affinity(self):