Commits

jason kirtland committed d95e18e

Janitorial.

  • Parent commit: 04924c4
  • Branch: user_defined_state


Files changed (35)
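
All 35 files receive style-only edits; no behavior is intended to change. The recurring patterns are: replacing the deprecated <> comparison operator with != (the <> spelling exists only in Python 2 and was removed in Python 3), adding a single space after commas and around assignment operators, splitting one-line suites such as "if not row: break" across two lines, normalizing indentation to four spaces, trimming trailing whitespace, and expanding one-line "class FOO(Bar): pass" aliases into classes with docstrings. A minimal before/after sketch of the conventions being applied (the function below is illustrative only, distilled from the kinds of edits in this diff, not code taken from the commit):

    # Before: deprecated operator, cramped spacing, one-line suite
    def table_names(dtbs):
        names=[t.Name for t in dtbs.TableDefs if t.Name[:4] <> "MSys"]
        if not names: return []
        return names

    # After: identical behavior, normalized to the style used throughout this commit
    def table_names(dtbs):
        names = [t.Name for t in dtbs.TableDefs if t.Name[:4] != "MSys"]
        if not names:
            return []
        return names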

lib/sqlalchemy/databases/access.py

         # This is necessary, so we get the latest updates
         dtbs = daoEngine.OpenDatabase(connection.engine.url.database)
 
-        names = [t.Name for t in dtbs.TableDefs if t.Name[:4] != "MSys" and t.Name[:4] <> "~TMP"]
+        names = [t.Name for t in dtbs.TableDefs if t.Name[:4] != "MSys" and t.Name[:4] != "~TMP"]
         dtbs.Close()
         return names
 

lib/sqlalchemy/databases/firebird.py

         default.DefaultDialect.__init__(self, **kwargs)
 
         self.type_conv = type_conv
-        self.concurrency_level= concurrency_level
+        self.concurrency_level = concurrency_level
 
     def dbapi(cls):
         import kinterbasdb
 
         # get primary key fields
         c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
-        pkfields =[self._normalize_name(r['fname']) for r in c.fetchall()]
+        pkfields = [self._normalize_name(r['fname']) for r in c.fetchall()]
 
         # get all of the fields for this table
         c = connection.execute(tblqry, [tablename])
         fks = {}
         while True:
             row = c.fetchone()
-            if not row: break
+            if not row:
+                break
 
             cname = self._normalize_name(row['cname'])
             try:
             fk[0].append(fname)
             fk[1].append(refspec)
 
-        for name,value in fks.iteritems():
+        for name, value in fks.iteritems():
             table.append_constraint(schema.ForeignKeyConstraint(value[0], value[1], name=name))
 
     def do_execute(self, cursor, statement, parameters, **kwargs):
     reserved_words = RESERVED_WORDS
 
     def __init__(self, dialect):
-        super(FBIdentifierPreparer,self).__init__(dialect, omit_schema=True)
+        super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
 
 
 dialect = FBDialect

lib/sqlalchemy/databases/information_schema.py

         coltype = ischema_names[type]
         #print "coltype " + repr(coltype) + " args " +  repr(args)
         coltype = coltype(*args)
-        colargs= []
+        colargs = []
         if default is not None:
             colargs.append(PassiveDefault(sql.text(default)))
         table.append_column(Column(name, coltype, nullable=nullable, *colargs))
             row[colmap[6]]
         )
         #print "type %s on column %s to remote %s.%s.%s" % (type, constrained_column, referred_schema, referred_table, referred_column) 
-        if type=='PRIMARY KEY':
+        if type == 'PRIMARY KEY':
             table.primary_key.add(table.c[constrained_column])
-        elif type=='FOREIGN KEY':
+        elif type == 'FOREIGN KEY':
             try:
                 fk = fks[constraint_name]
             except KeyError:
-                fk = ([],[])
+                fk = ([], [])
                 fks[constraint_name] = fk
             if current_schema == referred_schema:
                 referred_schema = table.schema

lib/sqlalchemy/databases/informix.py

     # 5 - rowid after insert
     def post_exec(self):
         if getattr(self.compiled, "isinsert", False) and self.last_inserted_ids() is None:
-            self._last_inserted_ids = [self.cursor.sqlerrd[1],]
+            self._last_inserted_ids = [self.cursor.sqlerrd[1]]
         elif hasattr( self.compiled , 'offset' ):
             self.cursor.offset( self.compiled.offset )
         super(InfoExecutionContext, self).post_exec()
     # for informix 7.31
     max_identifier_length = 18
 
-    def __init__(self, use_ansi=True,**kwargs):
+    def __init__(self, use_ansi=True, **kwargs):
         self.use_ansi = use_ansi
         default.DefaultDialect.__init__(self, **kwargs)
 
         else:
             opt = {}
 
-        return ([dsn,], opt )
+        return ([dsn], opt)
 
     def create_execution_context(self , *args, **kwargs):
         return InfoExecutionContext(self, *args, **kwargs)
 
-    def oid_column_name(self,column):
+    def oid_column_name(self, column):
         return "rowid"
 
     def table_names(self, connection, schema):
         s = "select tabname from systables"
         return [row[0] for row in connection.execute(s)]
 
-    def has_table(self, connection, table_name,schema=None):
+    def has_table(self, connection, table_name, schema=None):
         cursor = connection.execute("""select tabname from systables where tabname=?""", table_name.lower() )
         return bool( cursor.fetchone() is not None )
 
             try:
                 fk = fks[cons_name]
             except KeyError:
-               fk = ([], [])
-               fks[cons_name] = fk
+                fk = ([], [])
+                fks[cons_name] = fk
             refspec = ".".join([remote_table, remote_column])
             schema.Table(remote_table, table.metadata, autoload=True, autoload_with=connection)
             if local_column not in fk[0]:

lib/sqlalchemy/databases/mssql.py

                 elif coltype in (MSNVarchar, AdoMSNVarchar) and charlen == -1:
                     args[0] = None
                 coltype = coltype(*args)
-            colargs= []
+            colargs = []
             if default is not None:
                 colargs.append(schema.PassiveDefault(sql.text(default)))
 
                 row = cursor.fetchone()
                 cursor.close()
                 if not row is None:
-                    ic.sequence.start=int(row[0])
-                    ic.sequence.increment=int(row[1])
+                    ic.sequence.start = int(row[0])
+                    ic.sequence.increment = int(row[1])
             except:
                 # ignoring it, works just like before
                 pass
                 
             if rfknm != fknm:
                 if fknm:
-                    table.append_constraint(schema.ForeignKeyConstraint(scols, [_gen_fkref(table,s,t,c) for s,t,c in rcols], fknm))
+                    table.append_constraint(schema.ForeignKeyConstraint(scols, [_gen_fkref(table, s, t, c) for s, t, c in rcols], fknm))
                 fknm, scols, rcols = (rfknm, [], [])
-            if (not scol in scols): scols.append(scol)
-            if (not (rschema, rtbl, rcol) in rcols): rcols.append((rschema, rtbl, rcol))
+            if not scol in scols:
+                scols.append(scol)
+            if not (rschema, rtbl, rcol) in rcols:
+                rcols.append((rschema, rtbl, rcol))
 
         if fknm and scols:
-            table.append_constraint(schema.ForeignKeyConstraint(scols, [_gen_fkref(table,s,t,c) for s,t,c in rcols], fknm))
+            table.append_constraint(schema.ForeignKeyConstraint(scols, [_gen_fkref(table, s, t, c) for s, t, c in rcols], fknm))
 
 
 class MSSQLDialect_pymssql(MSSQLDialect):

lib/sqlalchemy/databases/mxODBC.py

 
 # override 'connect' call
 def connect(*args, **kwargs):
-        import mx.ODBC.Windows
-        conn = mx.ODBC.Windows.Connect(*args, **kwargs)
-        conn.datetimeformat = mx.ODBC.Windows.PYDATETIME_DATETIMEFORMAT
-        return Connection(conn)
+    import mx.ODBC.Windows
+    conn = mx.ODBC.Windows.Connect(*args, **kwargs)
+    conn.datetimeformat = mx.ODBC.Windows.PYDATETIME_DATETIMEFORMAT
+    return Connection(conn)
 Connect = connect

lib/sqlalchemy/databases/mysql.py

 
     def for_update_clause(self, select):
         if select.for_update == 'read':
-             return ' LOCK IN SHARE MODE'
+            return ' LOCK IN SHARE MODE'
         else:
             return super(MySQLCompiler, self).for_update_clause(select)
 

lib/sqlalchemy/databases/oracle.py

 
     def result_processor(self, dialect):
         def process(value):
-            if value is None or isinstance(value,datetime.datetime):
+            if value is None or isinstance(value, datetime.datetime):
                 return value
             else:
                 # convert cx_oracle datetime object returned pre-python 2.4
-                return datetime.datetime(value.year,value.month,
+                return datetime.datetime(value.year, value.month,
                     value.day,value.hour, value.minute, value.second)
         return process
 
 
     def result_processor(self, dialect):
         def process(value):
-            if value is None or isinstance(value,datetime.datetime):
+            if value is None or isinstance(value, datetime.datetime):
                 return value
             else:
                 # convert cx_oracle datetime object returned pre-python 2.4
-                return datetime.datetime(value.year,value.month,
+                return datetime.datetime(value.year, value.month,
                     value.day,value.hour, value.minute, value.second)
         return process
 
     def get_result_proxy(self):
         if hasattr(self, 'out_parameters'):
             if self.compiled_parameters is not None and len(self.compiled_parameters) == 1:
-                 for bind, name in self.compiled.bind_names.iteritems():
-                     if name in self.out_parameters:
-                         type = bind.type
-                         self.out_parameters[name] = type.dialect_impl(self.dialect).result_processor(self.dialect)(self.out_parameters[name].getvalue())
+                for bind, name in self.compiled.bind_names.iteritems():
+                    if name in self.out_parameters:
+                        type = bind.type
+                        self.out_parameters[name] = type.dialect_impl(self.dialect).result_processor(self.dialect)(self.out_parameters[name].getvalue())
             else:
-                 for k in self.out_parameters:
-                     self.out_parameters[k] = self.out_parameters[k].getvalue()
+                for k in self.out_parameters:
+                    self.out_parameters[k] = self.out_parameters[k].getvalue()
 
         if self.cursor.description is not None:
             for column in self.cursor.description:
         this id will be passed to do_begin_twophase(), do_rollback_twophase(),
         do_commit_twophase().  its format is unspecified."""
 
-        id = random.randint(0,2**128)
+        id = random.randint(0, 2 ** 128)
         return (0x1234, "%032x" % 9, "%032x" % id)
 
     def do_release_savepoint(self, connection, name):
             cursor = connection.execute(s)
         else:
             s = "select table_name from all_tables where tablespace_name NOT IN ('SYSTEM','SYSAUX') AND OWNER = :owner"
-            cursor = connection.execute(s,{'owner':self._denormalize_name(schema)})
+            cursor = connection.execute(s, {'owner': self._denormalize_name(schema)})
         return [self._normalize_name(row[0]) for row in cursor]
 
     def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, desired_table=None):
 
         if desired_owner is None, attempts to locate a distinct owner.
 
-	returns the actual name, owner, dblink name, and synonym name if found.
+        returns the actual name, owner, dblink name, and synonym name if found.
         """
 
-	sql = """select OWNER, TABLE_OWNER, TABLE_NAME, DB_LINK, SYNONYM_NAME
-		   from   ALL_SYNONYMS WHERE """
+        sql = """select OWNER, TABLE_OWNER, TABLE_NAME, DB_LINK, SYNONYM_NAME
+                   from   ALL_SYNONYMS WHERE """
 
         clauses = []
         params = {}
             clauses.append("TABLE_NAME=:tname")
             params['tname'] = desired_table
 
-        sql += " AND ".join(clauses) 
+        sql += " AND ".join(clauses)
 
-	result = connection.execute(sql, **params)
+        result = connection.execute(sql, **params)
         if desired_owner:
             row = result.fetchone()
             if row:
 
         resolve_synonyms = table.kwargs.get('oracle_resolve_synonyms', False)
 
-	if resolve_synonyms:
+        if resolve_synonyms:
             actual_name, owner, dblink, synonym = self._resolve_synonym(connection, desired_owner=self._denormalize_name(table.schema), desired_synonym=self._denormalize_name(table.name))
         else:
             actual_name, owner, dblink, synonym = None, None, None, None
             # NUMBER(9,2) if the precision is 9 and the scale is 2
             # NUMBER(3) if the precision is 3 and scale is 0
             #length is ignored except for CHAR and VARCHAR2
-            if coltype=='NUMBER' :
+            if coltype == 'NUMBER' :
                 if precision is None and scale is None:
                     coltype = OracleNumeric
                 elif precision is None and scale == 0  :
             table.append_column(schema.Column(colname, coltype, nullable=nullable, *colargs))
 
         if not table.columns:
-           raise exc.AssertionError("Couldn't find any column information for table %s" % actual_name)
+            raise exc.AssertionError("Couldn't find any column information for table %s" % actual_name)
 
         c = connection.execute("""SELECT
              ac.constraint_name,
                 try:
                     fk = fks[cons_name]
                 except KeyError:
-                   fk = ([], [])
-                   fks[cons_name] = fk
+                    fk = ([], [])
+                    fks[cons_name] = fk
                 if remote_table is None:
                     # ticket 363
                     util.warn(
                         remote_owner = self._normalize_name(ref_remote_owner)
 
                 if not table.schema and self._denormalize_name(remote_owner) == owner:
-                    refspec =  ".".join([remote_table, remote_column])               
+                    refspec =  ".".join([remote_table, remote_column])
                     t = schema.Table(remote_table, table.metadata, autoload=True, autoload_with=connection, oracle_resolve_synonyms=resolve_synonyms, useexisting=True)
                 else:
                     refspec =  ".".join([x for x in [remote_owner, remote_table, remote_column] if x])
         self.column = column
     def _get_from_objects(self, **kwargs):
         return []
-    
+
 class OracleCompiler(compiler.DefaultCompiler):
     """Oracle compiler modifies the lexical structure of Select
     statements to work under non-ANSI configured Oracle databases, if
             return compiler.DefaultCompiler.visit_join(self, join, **kwargs)
         else:
             return self.process(join.left, asfrom=True) + ", " + self.process(join.right, asfrom=True)
-    
+
     def _get_nonansi_join_whereclause(self, froms):
         clauses = []
-        
+
         def visit_join(join):
             if join.isouter:
                 def visit_binary(binary):
                 clauses.append(visitors.cloned_traverse(join.onclause, {}, {'binary':visit_binary}))
             else:
                 clauses.append(join.onclause)
-        
+
         for f in froms:
             visitors.traverse(f, {}, {'join':visit_join})
         return sql.and_(*clauses)
-        
+
     def visit_outer_join_column(self, vc):
         return self.process(vc.column) + "(+)"
 
                 if whereclause:
                     select = select.where(whereclause)
                     select._oracle_visit = True
-                
+
             if select._limit is not None or select._offset is not None:
                 # to use ROW_NUMBER(), an ORDER BY is required.
                 orderby = self.process(select._order_by_clause)
 
                 select = select.column(sql.literal_column("ROW_NUMBER() OVER (ORDER BY %s)" % orderby).label("ora_rn")).order_by(None)
                 select._oracle_visit = True
-                
+
                 limitselect = sql.select([c for c in select.c if c.key!='ora_rn'])
                 limitselect._oracle_visit = True
                 limitselect._is_wrapper = True
-                
+
                 if select._offset is not None:
                     limitselect.append_whereclause("ora_rn>%d" % select._offset)
                     if select._limit is not None:
                 else:
                     limitselect.append_whereclause("ora_rn<=%d" % select._limit)
                 select = limitselect
-        
+
         kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)
         return compiler.DefaultCompiler.visit_select(self, select, **kwargs)
 
         return ""
 
     def for_update_clause(self, select):
-        if select.for_update=="nowait":
+        if select.for_update == "nowait":
             return " FOR UPDATE NOWAIT"
         else:
             return super(OracleCompiler, self).for_update_clause(select)

lib/sqlalchemy/databases/postgres.py

             if value is None:
                 return value
             def convert_item(item):
-                if isinstance(item, (list,tuple)):
+                if isinstance(item, (list, tuple)):
                     return [convert_item(child) for child in item]
                 else:
                     if item_proc:
                             default = domain['default']
                         coltype = ischema_names[domain['attype']]
                 else:
-                    coltype=None
+                    coltype = None
 
             if coltype:
                 coltype = coltype(*args, **kwargs)
                           (attype, name))
                 coltype = sqltypes.NULLTYPE
 
-            colargs= []
+            colargs = []
             if default is not None:
                 match = re.search(r"""(nextval\(')([^']+)('.*$)""", default)
                 if match is not None:
             col = table.c[pk]
             table.primary_key.add(col)
             if col.default is None:
-                col.autoincrement=False
+                col.autoincrement = False
 
         # Foreign keys
         FK_SQL = """

lib/sqlalchemy/databases/sqlite.py

             microsecond = 0
         return time.strptime(value, self.__format__)[0:6] + (microsecond,)
 
-class SLDateTime(DateTimeMixin,sqltypes.DateTime):
+class SLDateTime(DateTimeMixin, sqltypes.DateTime):
     __format__ = "%Y-%m-%d %H:%M:%S"
     __microsecond__ = True
 
             return tuple([int(x) for x in num.split('.')])
         if self.dbapi is not None:
             sqlite_ver = self.dbapi.version_info
-            if sqlite_ver < (2,1,'3'):
+            if sqlite_ver < (2, 1, '3'):
                 util.warn(
                     ("The installed version of pysqlite2 (%s) is out-dated "
                      "and will cause errors in some cases.  Version 2.1.3 "
                 args = re.findall(r'(\d+)', args)
                 coltype = coltype(*[int(a) for a in args])
 
-            colargs= []
+            colargs = []
             if has_default:
                 colargs.append(PassiveDefault('?'))
             table.append_column(schema.Column(name, coltype, primary_key = primary_key, nullable = nullable, *colargs))
             try:
                 fk = fks[constraint_name]
             except KeyError:
-                fk = ([],[])
+                fk = ([], [])
                 fks[constraint_name] = fk
 
             # look up the table based on the given table's engine, not 'self',

lib/sqlalchemy/databases/sybase.py

                               (type, name))
                     coltype = sqltypes.NULLTYPE
                 coltype = coltype(*args)
-            colargs= []
+            colargs = []
             if default is not None:
                 colargs.append(schema.PassiveDefault(sql.text(default)))
 
                 row[0], row[1], row[2], row[3],
             )
             if not primary_table in foreignKeys.keys():
-                foreignKeys[primary_table] = [['%s'%(foreign_column)], ['%s.%s'%(primary_table,primary_column)]]
+                foreignKeys[primary_table] = [['%s' % (foreign_column)], ['%s.%s'%(primary_table, primary_column)]]
             else:
                 foreignKeys[primary_table][0].append('%s'%(foreign_column))
-                foreignKeys[primary_table][1].append('%s.%s'%(primary_table,primary_column))
+                foreignKeys[primary_table][1].append('%s.%s'%(primary_table, primary_column))
         for primary_table in foreignKeys.keys():
             #table.append_constraint(schema.ForeignKeyConstraint(['%s.%s'%(foreign_table, foreign_column)], ['%s.%s'%(primary_table,primary_column)]))
             table.append_constraint(schema.ForeignKeyConstraint(foreignKeys[primary_table][0], foreignKeys[primary_table][1]))
     def bindparam_string(self, name):
         res = super(SybaseSQLCompiler, self).bindparam_string(name)
         if name.lower().startswith('literal'):
-            res = 'STRING(%s)'%res
+            res = 'STRING(%s)' % res
         return res
 
     def get_select_precolumns(self, select):

lib/sqlalchemy/engine/base.py

     def __init__(self, pool, dialect, url, echo=None, proxy=None):
         self.pool = pool
         self.url = url
-        self.dialect=dialect
+        self.dialect = dialect
         self.echo = echo
         self.engine = self
         self.logger = log.instance_logger(self, echoflag=echo)

lib/sqlalchemy/engine/default.py

         This id will be passed to do_begin_twophase(), do_rollback_twophase(),
         do_commit_twophase().  Its format is unspecified."""
 
-        return "_sa_%032x" % random.randint(0,2**128)
+        return "_sa_%032x" % random.randint(0, 2 ** 128)
 
     def do_savepoint(self, connection, name):
         connection.execute(expression.SavepointClause(name))
         if self.dialect.positional:
             inputsizes = []
             for key in self.compiled.positiontup:
-               typeengine = types[key]
-               dbtype = typeengine.dialect_impl(self.dialect).get_dbapi_type(self.dialect.dbapi)
-               if dbtype is not None:
+                typeengine = types[key]
+                dbtype = typeengine.dialect_impl(self.dialect).get_dbapi_type(self.dialect.dbapi)
+                if dbtype is not None:
                     inputsizes.append(dbtype)
             try:
                 self.cursor.setinputsizes(*inputsizes)

lib/sqlalchemy/engine/strategies.py

 
         def create(self, entity, **kwargs):
             kwargs['checkfirst'] = False
-            self.dialect.schemagenerator(self.dialect ,self, **kwargs).traverse(entity)
+            self.dialect.schemagenerator(self.dialect, self, **kwargs).traverse(entity)
 
         def drop(self, entity, **kwargs):
             kwargs['checkfirst'] = False

lib/sqlalchemy/engine/url.py

             self.port = int(port)
         else:
             self.port = None
-        self.database= database
+        self.database = database
         self.query = query or {}
 
     def __str__(self):

lib/sqlalchemy/ext/associationproxy.py

     def clear(self):
         del self.col[0:len(self.col)]
 
-    def __eq__(self, other): return list(self) == other
-    def __ne__(self, other): return list(self) != other
-    def __lt__(self, other): return list(self) < other
-    def __le__(self, other): return list(self) <= other
-    def __gt__(self, other): return list(self) > other
-    def __ge__(self, other): return list(self) >= other
-    def __cmp__(self, other): return cmp(list(self), other)
+    def __eq__(self, other):
+        return list(self) == other
+
+    def __ne__(self, other):
+        return list(self) != other
+
+    def __lt__(self, other):
+        return list(self) < other
+
+    def __le__(self, other):
+        return list(self) <= other
+
+    def __gt__(self, other):
+        return list(self) > other
+
+    def __ge__(self, other):
+        return list(self) >= other
+
+    def __cmp__(self, other):
+        return cmp(list(self), other)
 
     def __add__(self, iterable):
         try:
     def clear(self):
         self.col.clear()
 
-    def __eq__(self, other): return dict(self) == other
-    def __ne__(self, other): return dict(self) != other
-    def __lt__(self, other): return dict(self) < other
-    def __le__(self, other): return dict(self) <= other
-    def __gt__(self, other): return dict(self) > other
-    def __ge__(self, other): return dict(self) >= other
-    def __cmp__(self, other): return cmp(dict(self), other)
+    def __eq__(self, other):
+        return dict(self) == other
+
+    def __ne__(self, other):
+        return dict(self) != other
+
+    def __lt__(self, other):
+        return dict(self) < other
+
+    def __le__(self, other):
+        return dict(self) <= other
+
+    def __gt__(self, other):
+        return dict(self) > other
+
+    def __ge__(self, other):
+        return dict(self) >= other
+
+    def __cmp__(self, other):
+        return cmp(dict(self), other)
 
     def __repr__(self):
         return repr(dict(self.items()))
     def copy(self):
         return util.Set(self)
 
-    def __eq__(self, other): return util.Set(self) == other
-    def __ne__(self, other): return util.Set(self) != other
-    def __lt__(self, other): return util.Set(self) < other
-    def __le__(self, other): return util.Set(self) <= other
-    def __gt__(self, other): return util.Set(self) > other
-    def __ge__(self, other): return util.Set(self) >= other
+    def __eq__(self, other):
+        return util.Set(self) == other
+
+    def __ne__(self, other):
+        return util.Set(self) != other
+
+    def __lt__(self, other):
+        return util.Set(self) < other
+
+    def __le__(self, other):
+        return util.Set(self) <= other
+
+    def __gt__(self, other):
+        return util.Set(self) > other
+
+    def __ge__(self, other):
+        return util.Set(self) >= other
 
     def __repr__(self):
         return repr(util.Set(self))

lib/sqlalchemy/ext/orderinglist.py

   u = User()
   u.topten.append(Blurb('Number one!'))
   u.topten.append(Blurb('Number two!'))
-  
+
   # Like magic.
   assert [blurb.position for blurb in u.topten] == [0, 1]
 
 
 def ordering_list(attr, count_from=None, **kw):
     """Prepares an OrderingList factory for use in mapper definitions.
-    
+
     Returns an object suitable for use as an argument to a Mapper relation's
     ``collection_class`` option.  Arguments are:
 
       example, ``ordering_list('pos', count_from=1)`` would create a 1-based
       list in SQL, storing the value in the 'pos' column.  Ignored if
       ``ordering_func`` is supplied.
-      
+
     Passes along any keyword arguments to ``OrderingList`` constructor.
     """
 
     Keyword argument filter, prepares a simple ``ordering_func`` from a
     ``count_from`` argument, otherwise passes ``ordering_func`` on unchanged.
     """
-    
+
     count_from = kw.pop('count_from', None)
     if kw.get('ordering_func', None) is None and count_from is not None:
         if count_from == 0:
     ``ordering_list`` function is used to configure ``OrderingList``
     collections in ``mapper`` relation definitions.
     """
-    
+
     def __init__(self, ordering_attr=None, ordering_func=None,
                  reorder_on_append=False):
         """A custom list that manages position information for its children.
-        
+
         ``OrderingList`` is a ``collection_class`` list implementation that
         syncs position in a Python list with a position attribute on the
         mapped objects.
 
           An ``ordering_func`` is called with two positional parameters: the
           index of the element in the list, and the list itself.
-          
+
           If omitted, Python list indexes are used for the attribute values.
           Two basic pre-built numbering functions are provided in this module:
           ``count_from_0`` and ``count_from_1``.  For more exotic examples
     def _reorder(self):
         """Sweep through the list and ensure that each object has accurate
         ordering information set."""
-        
+
         for index, entity in enumerate(self):
             self._order_entity(index, entity, True)
 
             return
 
         should_be = self.ordering_func(index, self)
-        if have <> should_be:
+        if have != should_be:
             self._set_order_value(entity, should_be)
 
     def append(self, entity):
         entity = super(OrderingList, self).pop(index)
         self._reorder()
         return entity
-        
+
     def __setitem__(self, index, entity):
         if isinstance(index, slice):
             for i in range(index.start or 0, index.stop or 0, index.step or 1):
         else:
             self._order_entity(index, entity, True)
             super(OrderingList, self).__setitem__(index, entity)
-            
+
     def __delitem__(self, index):
         super(OrderingList, self).__delitem__(index)
         self._reorder()

lib/sqlalchemy/ext/sqlsoup.py

 
 objectstore = Objectstore(create_session)
 
-class PKNotFoundError(SQLAlchemyError): pass
+class PKNotFoundError(SQLAlchemyError):
+    pass
 
 def _ddl_error(cls):
     msg = 'SQLSoup can only modify mapped Tables (found: %s)' \

lib/sqlalchemy/log.py

 def default_logging(name):
     global default_enabled
     if logging.getLogger(name).getEffectiveLevel() < logging.WARN:
-        default_enabled=True
+        default_enabled = True
     if not default_enabled:
         default_enabled = True
         handler = logging.StreamHandler(sys.stdout)

lib/sqlalchemy/orm/__init__.py

 
 
 def scoped_session(session_factory, scopefunc=None):
-  """Provides thread-local management of Sessions.
+    """Provides thread-local management of Sessions.
 
-  This is a front-end function to the [sqlalchemy.orm.scoping#ScopedSession]
-  class.
+    This is a front-end function to the [sqlalchemy.orm.scoping#ScopedSession]
+    class.
 
-  Usage::
+    Usage::
 
-    Session = scoped_session(sessionmaker(autoflush=True))
+      Session = scoped_session(sessionmaker(autoflush=True))
 
-  To instantiate a Session object which is part of the scoped
-  context, instantiate normally::
+    To instantiate a Session object which is part of the scoped context,
+    instantiate normally::
 
-    session = Session()
+      session = Session()
 
-  Most session methods are available as classmethods from
-  the scoped session::
+    Most session methods are available as classmethods from the scoped
+    session::
 
-    Session.commit()
-    Session.close()
+      Session.commit()
+      Session.close()
 
-  To map classes so that new instances are saved in the current
-  Session automatically, as well as to provide session-aware
-  class attributes such as "query", use the `mapper` classmethod
-  from the scoped session::
+    To map classes so that new instances are saved in the current Session
+    automatically, as well as to provide session-aware class attributes such
+    as "query", use the `mapper` classmethod from the scoped session::
 
-    mapper = Session.mapper
-    mapper(Class, table, ...)
+      mapper = Session.mapper
+      mapper(Class, table, ...)
 
-  """
-
-  return ScopedSession(session_factory, scopefunc=scopefunc)
+    """
+    return ScopedSession(session_factory, scopefunc=scopefunc)
 
 def create_session(bind=None, **kwargs):
     """create a new [sqlalchemy.orm.session#Session].

lib/sqlalchemy/orm/collections.py

         setattr(fn, '_sa_instrumented', True)
         fn.__doc__ = getattr(getattr(dict, fn.__name__), '__doc__')
 
-    Unspecified=sautil.symbol('Unspecified')
+    Unspecified = sautil.symbol('Unspecified')
 
     def __setitem__(fn):
         def __setitem__(self, key, value, _sa_initiator=None):
         setattr(fn, '_sa_instrumented', True)
         fn.__doc__ = getattr(getattr(Set, fn.__name__), '__doc__')
 
-    Unspecified=sautil.symbol('Unspecified')
+    Unspecified = sautil.symbol('Unspecified')
 
     def add(fn):
         def add(self, value, _sa_initiator=None):

lib/sqlalchemy/orm/dependency.py

             # the child objects have to have their foreign key to the parent set to NULL
             # this phase can be called safely for any cascade but is unnecessary if delete cascade
             # is on.
-            if self.post_update or not self.passive_deletes=='all':
+            if self.post_update or not self.passive_deletes == 'all':
                 for state in deplist:
-                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
+                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=self.passive_deletes)
                     if unchanged or deleted:
                         for child in deleted:
                             if child is not None and self.hasparent(child) is False:
             # head object is being deleted, and we manage its list of child objects
             # the child objects have to have their foreign key to the parent set to NULL
             if not self.post_update:
-                should_null_fks = not self.cascade.delete and not self.passive_deletes=='all'
+                should_null_fks = not self.cascade.delete and not self.passive_deletes == 'all'
                 for state in deplist:
-                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
+                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=self.passive_deletes)
                     if unchanged or deleted:
                         for child in deleted:
                             if child is not None and self.hasparent(child) is False:
                                     uowcommit.register_object(child)
         else:
             for state in deplist:
-                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=True)
+                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=True)
                 if added or deleted:
                     for child in added:
                         if child is not None:
     def process_dependencies(self, task, deplist, uowcommit, delete = False):
         #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " process_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
         if delete:
-            if self.post_update and not self.cascade.delete_orphan and not self.passive_deletes=='all':
+            if self.post_update and not self.cascade.delete_orphan and not self.passive_deletes == 'all':
                 # post_update means we have to update our row to not reference the child object
                 # before we can DELETE the row
                 for state in deplist:
                     self._synchronize(state, None, None, True, uowcommit)
-                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
+                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=self.passive_deletes)
                     if added or unchanged or deleted:
                         self._conditional_post_update(state, uowcommit, deleted + unchanged + added)
         else:
             for state in deplist:
-                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=True)
+                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=True)
                 if added or deleted or unchanged:
                     for child in added:
                         self._synchronize(state, child, None, False, uowcommit)
         if delete:
             if self.cascade.delete or self.cascade.delete_orphan:
                 for state in deplist:
-                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
+                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=self.passive_deletes)
                     if self.cascade.delete_orphan:
                         todelete = added + unchanged + deleted
                     else:
             for state in deplist:
                 uowcommit.register_object(state)
                 if self.cascade.delete_orphan:
-                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
+                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=self.passive_deletes)
                     if deleted:
                         for child in deleted:
                             if self.hasparent(child) is False:
 
         if delete:
             for state in deplist:
-                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
+                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=self.passive_deletes)
                 if deleted or unchanged:
                     for child in deleted + unchanged:
                         if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes):
         #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " preprocess_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
         if not delete:
             for state in deplist:
-                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=True)
+                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=True)
                 if deleted:
                     for child in deleted:
                         if self.cascade.delete_orphan and self.hasparent(child) is False:

lib/sqlalchemy/orm/dynamic.py

     def __init__(self, class_, key, typecallable, class_manager, target_mapper, order_by, **kwargs):
         super(DynamicAttributeImpl, self).__init__(class_, key, typecallable, class_manager, **kwargs)
         self.target_mapper = target_mapper
-        self.order_by=order_by
+        self.order_by = order_by
         self.query_class = AppenderQuery
 
     def get(self, state, passive=False):

lib/sqlalchemy/orm/mapper.py

         # do a special check for the "discriminiator" column, as it may only be present
         # in the 'with_polymorphic' selectable but we need it for the base mapper
         if self.polymorphic_on and self.polymorphic_on not in self._columntoproperty:
-           col = self.mapped_table.corresponding_column(self.polymorphic_on) or self.polymorphic_on
-           self._compile_property(col.key, ColumnProperty(col), init=False, setparent=True)
+            col = self.mapped_table.corresponding_column(self.polymorphic_on) or self.polymorphic_on
+            self._compile_property(col.key, ColumnProperty(col), init=False, setparent=True)
 
     def _adapt_inherited_property(self, key, prop):
         if not self.concrete:
                 pks = mapper._pks_by_table[table]
                 def comparator(a, b):
                     for col in pks:
-                        x = cmp(a[1][col._label],b[1][col._label])
+                        x = cmp(a[1][col._label], b[1][col._label])
                         if x != 0:
                             return x
                     return 0
                     params[col.key] = mapper._get_state_attr_by_column(state, col)
                 if mapper.version_id_col and table.c.contains_column(mapper.version_id_col):
                     params[mapper.version_id_col.key] = mapper._get_state_attr_by_column(state, mapper.version_id_col)
-                
+
             for connection, del_objects in delete.iteritems():
                 mapper = table_to_mapper[table]
                 def comparator(a, b):
                     for col in mapper._pks_by_table[table]:
-                        x = cmp(a[col.key],b[col.key])
+                        x = cmp(a[col.key], b[col.key])
                         if x != 0:
                             return x
                     return 0
         visitables = [(self.__props.itervalues(), 'property', state)]
 
         while visitables:
-            iterator,item_type,parent_state = visitables[-1]
+            iterator, item_type, parent_state = visitables[-1]
             try:
                 if item_type == 'property':
                     prop = iterator.next()

lib/sqlalchemy/orm/properties.py

 class SynonymProperty(MapperProperty):
     def __init__(self, name, map_column=None, descriptor=None):
         self.name = name
-        self.map_column=map_column
+        self.map_column = map_column
         self.descriptor = descriptor
 
     def setup(self, context, entity, path, adapter, **kwargs):

lib/sqlalchemy/orm/query.py

 
         if self._lockmode:
             try:
-                for_update = {'read':'read','update':True,'update_nowait':'nowait',None:False}[self._lockmode]
+                for_update = {'read': 'read',
+                              'update': True,
+                              'update_nowait': 'nowait',
+                              None: False}[self._lockmode]
             except KeyError:
                 raise sa_exc.ArgumentError("Unknown lockmode '%s'" % self._lockmode)
         else:

lib/sqlalchemy/orm/scoping.py

         
         from sqlalchemy.orm import mapper
         
-        extension_args = dict([(arg,kwargs.pop(arg))
+        extension_args = dict([(arg, kwargs.pop(arg))
                                for arg in get_cls_kwargs(_ScopedExt)
                                if arg in kwargs])
         
     setattr(ScopedSession, prop, makeprop(prop))
 
 def clslevel(name):
-    def do(cls, *args,**kwargs):
+    def do(cls, *args, **kwargs):
         return getattr(Session, name)(*args, **kwargs)
     return classmethod(do)
-for prop in ('close_all','object_session', 'identity_key'):
+for prop in ('close_all', 'object_session', 'identity_key'):
     setattr(ScopedSession, prop, clslevel(prop))
     
 class _ScopedExt(MapperExtension):

lib/sqlalchemy/orm/strategies.py

 from sqlalchemy.sql import util as sql_util
 from sqlalchemy.sql import visitors, expression, operators
 from sqlalchemy.orm import mapper, attributes
-from sqlalchemy.orm.interfaces import LoaderStrategy, StrategizedOption,\
+from sqlalchemy.orm.interfaces import LoaderStrategy, StrategizedOption, \
      MapperOption, PropertyOption, serialize_path, deserialize_path
 from sqlalchemy.orm import session as sessionlib
 from sqlalchemy.orm import util as mapperutil
     def __setstate__(self, state):
         self.state = state['state']
         self.key = state['key']
-        self.options= state['options']
+        self.options = state['options']
         self.path = deserialize_path(state['path'])
         
     def __call__(self):

lib/sqlalchemy/pool.py

     def checkout(self):
         if self.connection is None:
             raise exc.InvalidRequestError("This connection is closed")
-        self.__counter +=1
+        self.__counter += 1
 
         if not self._pool._on_checkout or self.__counter != 1:
             return self
             self._connection_record = None
 
     def close(self):
-        self.__counter -=1
+        self.__counter -= 1
         if self.__counter == 0:
             self._close()
 
         return "NullPool"
 
     def do_return_conn(self, conn):
-       conn.close()
+        conn.close()
 
     def do_return_invalid(self, conn):
-       pass
+        pass
 
     def do_get(self):
         return self.create_connection()

lib/sqlalchemy/schema.py

                     (tname, colname) = m.group(1, 2)
                     schema = None
                 else:
-                    (schema,tname,colname) = m.group(1,2,3)
+                    (schema, tname, colname) = m.group(1, 2, 3)
                 if _get_table_key(tname, schema) not in parenttable.metadata:
                     raise exc.InvalidRequestError(
                         "Could not find table '%s' with which to generate a "
         self.name = name
         self.start = start
         self.increment = increment
-        self.optional=optional
+        self.optional = optional
         self.quote = quote
         self.schema = schema
         self.kwargs = kwargs
         if self not in table.constraints:
             table.constraints.add(self)
             for (c, r) in zip(self.__colnames, self.__refcolnames):
-                self.append_element(c,r)
+                self.append_element(c, r)
 
     def append_element(self, col, refcol):
         fk = ForeignKey(refcol, constraint=self, name=self.name, onupdate=self.onupdate, ondelete=self.ondelete, use_alter=self.use_alter)
 
     def add(self, col):
         self.columns.add(col)
-        col.primary_key=True
+        col.primary_key = True
     append_column = add
 
     def replace(self, col):
         self.columns.replace(col)
 
     def remove(self, col):
-        col.primary_key=False
+        col.primary_key = False
         del self.columns[col.key]
 
     def copy(self):

lib/sqlalchemy/sql/compiler.py

         return truncname
     
     def _process_anon(self, match):
-        (ident, derived) = match.group(1,2)
+        (ident, derived) = match.group(1, 2)
 
         key = ('anonymous', ident)
         if key in self.generated_ids:

lib/sqlalchemy/sql/expression.py

             co = _ColumnClause(self.anon_label, selectable, type_=getattr(self, 'type', None))
 
         co.proxies = [self]
-        selectable.columns[name]= co
+        selectable.columns[name] = co
         return co
 
     def anon_label(self):
     """Represent an element that can be used within the ``FROM`` clause of a ``SELECT`` statement."""
 
     __visit_name__ = 'fromclause'
-    named_with_column=False
+    named_with_column = False
     _hide_froms = []
     quote = False
 
         return fromclause in util.Set(self._cloned_set)
 
     def replace_selectable(self, old, alias):
-      """replace all occurences of FromClause 'old' with the given Alias object, returning a copy of this ``FromClause``."""
-
-      global ClauseAdapter
-      if ClauseAdapter is None:
-          from sqlalchemy.sql.util import ClauseAdapter
-      return ClauseAdapter(alias).traverse(self)
+        """replace all occurences of FromClause 'old' with the given Alias object, returning a copy of this ``FromClause``."""
+
+        global ClauseAdapter
+        if ClauseAdapter is None:
+            from sqlalchemy.sql.util import ClauseAdapter
+        return ClauseAdapter(alias).traverse(self)
 
     def correspond_on_equivalents(self, column, equivalents):
         col = self.corresponding_column(column, require_embedded=True)
 
     def _convert_to_unique(self):
         if not self.unique:
-            self.unique=True
+            self.unique = True
             self.key = "{ANON %d %s}" % (id(self), self._orig_key or 'param')
 
     def _get_from_objects(self, **modifiers):
             self._oid_column = self.element.oid_column._make_proxy(self)
 
     def _copy_internals(self, clone=_clone):
-       self._reset_exported()
-       self.element = _clone(self.element)
-       baseselectable = self.element
-       while isinstance(baseselectable, Alias):
-           baseselectable = baseselectable.selectable
-       self.original = baseselectable
+        self._reset_exported()
+        self.element = _clone(self.element)
+        baseselectable = self.element
+        while isinstance(baseselectable, Alias):
+            baseselectable = baseselectable.selectable
+        self.original = baseselectable
 
     def get_children(self, column_collections=True, aliased_selectables=True, **kwargs):
         if column_collections:
                     counter = 1
                     while label in self.table.c:
                         label = self.__label + "_" + str(counter)
-                        counter +=1
+                        counter += 1
                     self.__label = label
             else:
                 self.__label = self.name
         
         """
         s = self._generate()
-        s._should_correlate=False
+        s._should_correlate = False
         if fromclauses == (None,):
             s._correlate = util.Set()
         else:
     def append_correlation(self, fromclause):
         """append the given correlation expression to this select() construct."""
         
-        self._should_correlate=False
+        self._should_correlate = False
         self._correlate = self._correlate.union([fromclause])
 
     def append_column(self, column):
         self._bind = bind
         self.table = table
         self.select = None
-        self.inline=inline
+        self.inline = inline
         if prefixes:
             self._prefixes = [_literal_as_text(p) for p in prefixes]
         else:
 
 class _IdentifiedClause(ClauseElement):
     supports_execution = True
-    quote=False
+    quote = False
     
     def __init__(self, ident):
         self.ident = ident

lib/sqlalchemy/topological.py

                     for n in lead.cycles:
                         if n is not lead:
                             n._cyclical = True
-                            for (n,k) in list(edges.edges_by_parent(n)):
+                            for (n, k) in list(edges.edges_by_parent(n)):
                                 edges.add((lead, k))
-                                edges.remove((n,k))
+                                edges.remove((n, k))
                 continue
             else:
                 # long cycles not allowed
         nodealldeps = node.all_deps()
         if nodealldeps:
             # iterate over independent node indexes in reverse order so we can efficiently remove them
-            for index in xrange(len(independents)-1,-1,-1):
+            for index in xrange(len(independents) - 1, -1, -1):
                 child, childsubtree, childcycles = independents[index]
                 # if there is a dependency between this node and an independent node
                 if (childsubtree.intersection(nodealldeps) or childcycles.intersection(node.dependencies)):
                     # remove the child from list of independent subtrees
                     independents[index:index+1] = []
         # add node as a new independent subtree
-        independents.append((node,subtree,cycles))
+        independents.append((node, subtree, cycles))
     # choose an arbitrary node from list of all independent subtrees
     head = independents.pop()[0]
     # add all other independent subtrees as a child of the chosen root

lib/sqlalchemy/types.py

         return self.impl.copy_value(value)
 
     def compare_values(self, x, y):
-        return self.impl.compare_values(x,y)
+        return self.impl.compare_values(x, y)
 
     def is_mutable(self):
         return self.impl.is_mutable()
             if value is None:
                 return None
             return dt.datetime.utcfromtimestamp(0) + value
-            
+
     def process_result_value(self, value, dialect):
         if dialect.__class__ in self.__supported:
             return value
                 return None
             return value - dt.datetime.utcfromtimestamp(0)
 
-class FLOAT(Float): pass
+class FLOAT(Float):
+    """The SQL FLOAT type."""
+
+
+class NUMERIC(Numeric):
+    """The SQL NUMERIC type."""
+
+
+class DECIMAL(Numeric):
+    """The SQL DECIMAL type."""
+
+
+class INT(Integer):
+    """The SQL INT or INTEGER type."""
+
+
+INTEGER = INT
+
+class SMALLINT(Smallinteger):
+    """The SQL SMALLINT type."""
+
+
+class TIMESTAMP(DateTime):
+    """The SQL TIMESTAMP type."""
+
+
+class DATETIME(DateTime):
+    """The SQL DATETIME type."""
+
+
+class DATE(Date):
+    """The SQL DATE type."""
+
+
+class TIME(Time):
+    """The SQL TIME type."""
+
+
 TEXT = Text
-class NUMERIC(Numeric): pass
-class DECIMAL(Numeric): pass
-class INT(Integer): pass
-INTEGER = INT
-class SMALLINT(Smallinteger): pass
-class TIMESTAMP(DateTime): pass
-class DATETIME(DateTime): pass
-class DATE(Date): pass
-class TIME(Time): pass
-class CLOB(Text): pass
-class VARCHAR(String): pass
-class CHAR(String): pass
-class NCHAR(Unicode): pass
-class BLOB(Binary): pass
-class BOOLEAN(Boolean): pass
+
+class CLOB(Text):
+    """The SQL CLOB type."""
+
+
+class VARCHAR(String):
+    """The SQL VARCHAR type."""
+
+
+class CHAR(String):
+    """The SQL CHAR type."""
+
+
+class NCHAR(Unicode):
+    """The SQL NCHAR type."""
+
+
+class BLOB(Binary):
+    """The SQL BLOB type."""
+
+
+class BOOLEAN(Boolean):
+    """The SQL BOOLEAN type."""
 
 NULLTYPE = NullType()
 

lib/sqlalchemy/util.py

     class deque(list):
         def appendleft(self, x):
             self.insert(0, x)
-        
+
         def extendleft(self, iterable):
             self[0:0] = list(iterable)
 
         def popleft(self):
             return self.pop(0)
-            
+
         def rotate(self, n):
             for i in xrange(n):
                 self.appendleft(self.pop())
-                
+
 def to_list(x, default=None):
     if x is None:
         return default
 def array_as_starargs_decorator(fn):
     """Interpret a single positional array argument as
     *args for the decorated method.
-    
+
     """
 
     def starargs_as_list(self, *args, **kwargs):
             return fn(self, *args, **kwargs)
     starargs_as_list.__doc__ = fn.__doc__
     return function_named(starargs_as_list, fn.__name__)
-    
+
 def to_set(x):
     if x is None:
         return Set()
 
 def unbound_method_to_callable(func_or_cls):
     """Adjust the incoming callable such that a 'self' argument is not required."""
-    
+
     if isinstance(func_or_cls, types.MethodType) and not func_or_cls.im_self:
         return func_or_cls.im_func
     else:
             return specimen.__emulates__
 
     isa = isinstance(specimen, type) and issubclass or isinstance
-    if isa(specimen, list): return list
-    if isa(specimen, set_types): return Set
-    if isa(specimen, dict): return dict
+    if isa(specimen, list):
+        return list
+    elif isa(specimen, set_types):
+        return Set
+    elif isa(specimen, dict):
+        return dict
 
     if hasattr(specimen, 'append'):
         return list
 
 
 class NotImplProperty(object):
-  """a property that raises ``NotImplementedError``."""
+    """a property that raises ``NotImplementedError``."""
 
-  def __init__(self, doc):
-      self.__doc__ = doc
+    def __init__(self, doc):
+        self.__doc__ = doc
 
-  def __set__(self, obj, value):
-      raise NotImplementedError()
+    def __set__(self, obj, value):
+        raise NotImplementedError()
 
-  def __delete__(self, obj):
-      raise NotImplementedError()
+    def __delete__(self, obj):
+        raise NotImplementedError()
 
-  def __get__(self, obj, owner):
-      if obj is None:
-          return self
-      else:
-          raise NotImplementedError()
+    def __get__(self, obj, owner):
+        if obj is None:
+            return self
+        else:
+            raise NotImplementedError()
 
 class OrderedProperties(object):
     """An object that maintains the order in which attributes are set upon it.
 
     def __contains__(self, key):
         return key in self._data
-    
+
     def update(self, value):
         self._data.update(value)
-        
+
     def get(self, key, default=None):
         if key in self:
             return self[key]
     def remove(self, element):
         Set.remove(self, element)
         self._list.remove(element)
-    
+
     def insert(self, pos, element):
         if element not in self:
             self._list.insert(pos, element)
         Set.add(self, element)
-        
+
     def discard(self, element):
         if element in self:
             self._list.remove(element)
         return iter(self._list)
 
     def __repr__(self):
-      return '%s(%r)' % (self.__class__.__name__, self._list)
+        return '%s(%r)' % (self.__class__.__name__, self._list)
 
     __str__ = __repr__
 
     def update(self, iterable):
-      add = self.add
-      for i in iterable:
-          add(i)
-      return self
+        add = self.add
+        for i in iterable:
+            add(i)
+        return self
 
     __ior__ = update
 
     def union(self, other):
-      result = self.__class__(self)
-      result.update(other)
-      return result
+        result = self.__class__(self)
+        result.update(other)
+        return result
 
     __or__ = union
 
     __iand__ = intersection_update
 
     def symmetric_difference_update(self, other):
-      Set.symmetric_difference_update(self, other)
-      self._list =  [ a for a in self._list if a in self]
-      self._list += [ a for a in other._list if a in self]
-      return self
+        Set.symmetric_difference_update(self, other)
+        self._list =  [ a for a in self._list if a in self]
+        self._list += [ a for a in other._list if a in self]
+        return self
 
     __ixor__ = symmetric_difference_update
 
 class WeakCompositeKey(object):
     """an weak-referencable, hashable collection which is strongly referenced
     until any one of its members is garbage collected.
-    
+
     """
     keys = Set()
-    
+
     def __init__(self, *args):
         self.args = [self.__ref(arg) for arg in args]
         WeakCompositeKey.keys.add(self)
-    
+
     def __ref(self, arg):
         if isinstance(arg, type):
             return weakref.ref(arg, self.__remover)
         else:
             return lambda: arg
-            
+
     def __remover(self, wr):
         WeakCompositeKey.keys.discard(self)
-        
+
     def __hash__(self):
         return hash(tuple(self))
-        
+
     def __cmp__(self, other):
         return cmp(tuple(self), tuple(other))
-    
+
     def __iter__(self):
         return iter([arg() for arg in self.args])
-        
+
 class _symbol(object):
     def __init__(self, name):
         """Construct a new named symbol."""
 
     def _cleanup(self, wr, key=None):
         if key is None:
-            key=wr.key
+            key = wr.key
         try:
             del self._weakrefs[key]
         except (KeyError, AttributeError):  # pragma: no cover