Commits

Mike Bayer  committed 34b9501

- rewrite code examples with corrected formatting

  • Participants
  • Parent commits d93717a

Comments (0)

Files changed (50)

File UsageRecipes/AutoRelationships.md

     
     def camelcase(name):
         return (
-                name[0](0).upper() +
-                name[1:](1:)
+                name[0].upper() +
+                name[1:]
             )
     
     def map_metadata(Base, metadata):
         for table in metadata.tables.values():
             clsname = camelcase(table.name).encode('ascii')
             cls = type(clsname, (Base,), {"name": clsname, "__table__": table})
-            result[clsname](clsname) = cls
-            table_to_class[table](table) = cls
+            result[clsname] = cls
+            table_to_class[table] = cls
     
         for table in metadata.tables.values():
-            local_cls = table_to_class[table](table)
+            local_cls = table_to_class[table]
             for constraint in table.constraints:
                 if isinstance(constraint, ForeignKeyConstraint):
                     fks = constraint.elements
-                    referred_table = fks[0](0).column.table
-                    referred_cls = table_to_class[referred_table](referred_table)
+                    referred_table = fks[0].column.table
+                    referred_cls = table_to_class[referred_table]
     
                     setattr(
                         local_cls,
                         referred_cls.__name__.lower(),
                         relationship(referred_cls,
-                                foreign_keys=[for fk in constraint.elements](fk.parent),
+                                foreign_keys=[fk.parent for fk in constraint.elements],
                                 backref=backref(
                                         local_cls.__name__.lower() + "_collection",
                                         passive_deletes="all",  # optional, replace this 
     
         classes = map_metadata(Base, reflected_metadata)
     
-        A, B, C, D = classes["A"]("A"), classes["B"]("B"), classes["C"]("C"), classes["D"]("D")
+        A, B, C, D = classes["A"], classes["B"], classes["C"], classes["D"]
         from sqlalchemy.orm import Session
         sess = Session(e)
     
-        sess.add_all([       A(d=D(), b_collection=[B(), B()](
-    )),
-            D(c_collection=[C()](C(),))
+        sess.add_all([
+            A(d=D(), b_collection=[B(), B()]),
+            D(c_collection=[C(), C()])
         ])
         sess.commit()
     

File UsageRecipes/AutoSequenceGeneration.md

 
     class ClassDefaults(DeclarativeMeta):
             def __init__(cls,classname, bases, dict_):
-                    if not ( dict_.has_key('__mapper_args__') and dict_['__mapper_args__']('__mapper_args__').has_key('polymorphic_identity') ):
+                    if not ( dict_.has_key('__mapper_args__') and dict_['__mapper_args__'].has_key('polymorphic_identity') ):
                             # Only add the key if we are not creating a polymorphic SQLAlchemy object, because SQLAlchemy
                             # does not want a separate 'id' key added in that case.
                             # seqprefix can be None
                             seqprefix = getattr(cls,'__tablename__',None)
                             
                              # for SQLAlchemy 0.6_beta1 and earlier, use this:
-                             dict_['id']('id') = PrimaryKey(seqprefix=seqprefix)
+                             dict_['id'] = PrimaryKey(seqprefix=seqprefix)
     
                              # for SQLAlchemy 0.6_beta2 and later, use this:
                              #cls.id = PrimaryKey(seqprefix=seqprefix)

File UsageRecipes/BakedQuery.md

             """Freeze the statement used by this Query."""
     
             if name not in cache:
-                cache[name](name) = context = self._compile_context()
+                cache[name] = context = self._compile_context()
                 del context.session
                 del context.query
-            self._baked_context = cache[name](name)
+            self._baked_context = cache[name]
             self._baked_cache = cache
     
         def _compile_context(self, **kw):
         s = Session(e, query_cls=BakedQuery)
         Base.metadata.create_all(e)
     
-        s.add_all([data="data: %d" % i) for i in xrange(100)](Foo(id=i,))
+        s.add_all([Foo(id=i, data="data: %d" % i) for i in xrange(100)])
     
         cache = {}
     

File UsageRecipes/CaseBasedPolymorphicOn.md

         type = Column(String(20))
     
         discriminator_expr = case(
-            ["employee")]((type=='employee',),
+            [(type=='employee', "employee")],
             else_="person"
         )
         discriminator = column_property(discriminator_expr)
     p3.type = "engineer"
     p4.type = "manager"
     
-    s.add_all([p2, p3, p4](p1,))
+    s.add_all([p1, p2, p3, p4])
     s.commit()
     s.close()
     
-    r = [for x in s.query(Person).order_by(Person.id)](type(x))
+    r = [type(x) for x in s.query(Person).order_by(Person.id)]
     assert \
         r == \
-        [Employee, Person, Person](Person,)
+        [Person, Employee, Person, Person]
     

File UsageRecipes/DatabaseCrypt.md

     
         session = Session(engine)
     
-        session.add_all([       User(name='user1', password="farb%349"),
+        session.add_all([
+            User(name='user1', password="farb%349"),
             User(name='user2', password="vy6kia%345"),
             User(name='user3', password="0sm3EF88s"),
-        ](
-    ))
+        ])
         session.commit()
     
     
     
         session = Session(engine)
     
-        session.add_all([       User(name='user1', password="farb%349"),
+        session.add_all([
+            User(name='user1', password="farb%349"),
             User(name='user2', password="vy6kia%345"),
             User(name='user3', password="0sm3EF88s"),
-        ](
-    ))
+        ])
         session.commit()
     
     

File UsageRecipes/DeclarativeAbstractConcreteBase.md

     
     class DeclarativeAbstractConcreteBase(object):
     
-        _mapper_args = [   @classmethod
+        _mapper_args = []
+    
+        @classmethod
         def __mapper_cls__(cls, *args, **kw):
             """Declarative will use this function in lieu of 
             calling mapper() directly.
             # docs.  We generate it based on what we find in 
             # __subclasses__().
             pjoin = polymorphic_union(dict(
-                (klass.__mapper_args__['polymorphic_identity'](]
-    
-    ), klass.__table__)
+                (klass.__mapper_args__['polymorphic_identity'], klass.__table__)
                 for klass in cls.__subclasses__()
                 if hasattr(klass, '__mapper_args__')
             ), 'type', 'pjoin')
             # iterate through our collected sets of mapper
             # args and call mapper() for each.
             for args, kw in cls._mapper_args:
-                klass = args[0](0)
+                klass = args[0]
                 # the registry is global to the 
                 # base here so ensure we only deal with 
                 # classes in this hierarchy 

File UsageRecipes/DeclarativeComputeTotalOfSubquery.md

     parent=session.query(Parent).filter_by(name="foo").first()
     qa=session.query(Host.id).filter_by(parent=parent).subquery()
     qb=session.query(func.sum(HostFilesystem.sizeMB)).filter(HostFilesystem.host_id.in_(qa))
-    subtotal=qb.first()[0](0)
+    subtotal=qb.first()[0]
 
 
Using subqueries such as this is generally much more efficient than manually iterating over each record using Python in order to calculate the subtotal iteratively. The above method is generally much more efficient because network round-trips (as well as overall network traffic) are dramatically reduced when computation is pushed to the remote database server. For larger data sets, the difference in performance between the two methods can be extremely large.

File UsageRecipes/DependentTables.md

     #!python
     def find_dependent_tables(table):
         metadata = table.metadata
-        stack = [table](table)
+        stack = [table]
         result = set()
         while stack:
             t = stack.pop()

File UsageRecipes/DisjointEagerLoading.md

     
     # test data
     
-    session.add_all([   Parent(
+    session.add_all([
+        Parent(
             data="parent %d" % i, 
-            children=set([Child(data="child %d parent %d" % (j, i)) for j in xrange(50)](
-    ))
+            children=set([Child(data="child %d parent %d" % (j, i)) for j in xrange(50)])
         ) 
         for i in xrange(100)
     ])
             
         collections = dict((k, list(v)) for k, v in groupby(
                         child_q, 
-                        lambda x:tuple([c.key) for c in remote_cols](getattr(x,))
+                        lambda x:tuple([getattr(x, c.key) for c in remote_cols])
                     ))
         
         parents = query.all()
                 p, 
                 attr.key, 
                 collections.get(
-                    tuple([c.key) for c in local_cols](getattr(p,)), 
+                    tuple([getattr(p, c.key) for c in local_cols]), 
                     ())
             )
         return parents

File UsageRecipes/DropEverything.md

     
     metadata = MetaData()
     
-    tbs = [= [](]
-    all_fks)
+    tbs = []
+    all_fks = []
     
     for table_name in inspector.get_table_names():
-        fks = [   for fk in inspector.get_foreign_keys(table_name):
-            if not fk['name'](]
-    ):
+        fks = []
+        for fk in inspector.get_foreign_keys(table_name):
+            if not fk['name']:
                 continue
             fks.append(
-                ForeignKeyConstraint((),(),name=fk['name']('name'))
+                ForeignKeyConstraint((),(),name=fk['name'])
                 )
         t = Table(table_name,metadata,*fks)
         tbs.append(t)

File UsageRecipes/EntityName.md

     
     sess = sessionmaker()()
     
-    sess.add_all([T1Foo(), T2Foo(), T1Foo()](T1Foo(),))
+    sess.add_all([T1Foo(), T1Foo(), T2Foo(), T1Foo()])
     
     print sess.query(T1Foo).all()
     print sess.query(T2Foo).all()
     
     sess = sessionmaker(engine)()
     
-    sess.add_all([T1Foo(data='t2'), T2Foo(data='t3'),
-                 T1Foo(data='t4')](T1Foo(data='t1'),))
+    sess.add_all([T1Foo(data='t1'), T1Foo(data='t2'), T2Foo(data='t3'),
+                 T1Foo(data='t4')])
     
     print sess.query(T1Foo).all()
     print sess.query(T2Foo).all()

File UsageRecipes/ExpireRelationshipOnFKChange.md

                 # optional behavior #1 - expire the "User.articles"
                 # collection on the existing "user" object
                 if prop.back_populates and prop.key in target.__dict__:
-                    sess.expire(getattr(target, prop.key), [prop.back_populates](prop.back_populates))
+                    sess.expire(getattr(target, prop.key), [prop.back_populates])
     
                 # behavior #2 - expire Article.user
-                sess.expire(target, [prop.key](prop.key))
+                sess.expire(target, [prop.key])
     
                 # optional behavior #3 - "trick" the ORM by actually
                 # setting the value ahead of time, then emitting a load
                 # Other techniques here including looking in the identity
                 # map for "value", if this is a simple many-to-one get.
                 if prop.back_populates:
-                    target.__dict__[colkey](colkey) = value
+                    target.__dict__[colkey] = value
                     new = getattr(target, prop.key)
                     if new is not None:
-                        sess.expire(new, [prop.back_populates](prop.back_populates))
+                        sess.expire(new, [prop.back_populates])
     
     
     

File UsageRecipes/ExpiryMemoized.md

         def __get__(self, obj, cls):
             if obj is None:
                 return self
-            self.all_memoized[cls](cls).add(self.__name__)
-            obj.__dict__[self.__name__](self.__name__) = result = self.fget(obj)
+            self.all_memoized[cls].add(self.__name__)
+            obj.__dict__[self.__name__] = result = self.fget(obj)
             return result
     
     @event.listens_for(Session, "after_flush")
         """expire all orm_memoized en masse for a given flush"""
     
         for obj in session.identity_map.values():
-            for name in orm_memoized.all_memoized[obj.__class__](obj.__class__):
+            for name in orm_memoized.all_memoized[obj.__class__]:
                 obj.__dict__.pop(name, None)
 
 
         def __get__(self, obj, cls):
             if obj is None:
                 return self
-            self.all_memoized[cls](cls).add(self.__name__)
-            obj.__dict__[self.__name__](self.__name__) = result = self.fget(obj)
+            self.all_memoized[cls].add(self.__name__)
+            obj.__dict__[self.__name__] = result = self.fget(obj)
             return result
     
     @event.listens_for(Session, "after_flush")
         """expire all orm_memoized en masse for a given flush"""
     
         for obj in session.identity_map.values():
-            for name in orm_memoized.all_memoized[obj.__class__](obj.__class__):
+            for name in orm_memoized.all_memoized[obj.__class__]:
                 obj.__dict__.pop(name, None)
     
     Base= declarative_base()
     Base.metadata.create_all(e)
     s = Session(e)
     
-    s.add_all([   MyData(related=[
+    s.add_all([
+        MyData(related=[
             MyRelated(),
             MyRelated(),
             MyRelated(),
             MyRelated(),
             MyRelated(),
-        ](
-    ))
+        ])
     ])
     
     s.commit()
     
     assert md.related_count == 5
     
-    rel = s.query(MyRelated)[0:3](0:3)
+    rel = s.query(MyRelated)[0:3]
     for mr in rel:
         s.delete(mr)
     s.flush()

File UsageRecipes/FunctionAttribute.md

     
     sess = sessionmaker(engine)()
     
-    sess.add_all([   Foo(some_uppercase_thing="value 1"),
+    sess.add_all([
+        Foo(some_uppercase_thing="value 1"),
         Foo(some_uppercase_thing="value 2"),
         Foo(some_uppercase_thing="value 3"),
         Foo(some_uppercase_thing="value 4"),
-    ](
-    ))
+    ])
     
     sess.commit()
     
-    assert sess.query(Foo.some_uppercase_thing).all() == [   ("value 1", ),
+    assert sess.query(Foo.some_uppercase_thing).all() == [
+        ("value 1", ),
         ("value 2", ),
         ("value 3", ),
         ("value 4", ),
-    ](
-    )
+    ]
     
-    assert sess.query(Foo.some_uppercase_thing).filter(Foo.some_uppercase_thing == 'value 2').all() == [   ("value 2", ),
-    ](
-    )
+    assert sess.query(Foo.some_uppercase_thing).filter(Foo.some_uppercase_thing == 'value 2').all() == [
+        ("value 2", ),
+    ]
     
-    assert sess.execute(select([Foo.__table__.c.data](Foo.__table__.c.data))).fetchall() == [   ("VALUE 1", ),
+    assert sess.execute(select([Foo.__table__.c.data])).fetchall() == [
+        ("VALUE 1", ),
         ("VALUE 2", ),
         ("VALUE 3", ),
         ("VALUE 4", ),
-    ](
-    )
+    ]
     
 
 
     
     sess = sessionmaker(engine)()
     
-    sess.add_all([   Foo(some_uppercase_thing="value 1"),
+    sess.add_all([
+        Foo(some_uppercase_thing="value 1"),
         Foo(some_uppercase_thing="value 2"),
         Foo(some_uppercase_thing="value 3"),
         Foo(some_uppercase_thing="value 4"),
-    ](
-    ))
+    ])
     
     sess.commit()
     
-    assert sess.query(Foo.some_uppercase_thing).all() == [   ("value 1", ),
+    assert sess.query(Foo.some_uppercase_thing).all() == [
+        ("value 1", ),
         ("value 2", ),
         ("value 3", ),
         ("value 4", ),
-    ](
-    )
+    ]
     
-    assert sess.query(Foo.some_uppercase_thing).filter(Foo.some_uppercase_thing == 'value 2').all() == [   ("value 2", ),
-    ](
-    )
+    assert sess.query(Foo.some_uppercase_thing).filter(Foo.some_uppercase_thing == 'value 2').all() == [
+        ("value 2", ),
+    ]
     
-    assert sess.execute(select([foo_table.c.data](foo_table.c.data))).fetchall() == [   ("VALUE 1", ),
+    assert sess.execute(select([foo_table.c.data])).fetchall() == [
+        ("VALUE 1", ),
         ("VALUE 2", ),
         ("VALUE 3", ),
         ("VALUE 4", ),
-    ](
-    )
+    ]
 

File UsageRecipes/GenericOrmBaseClass.md

 Example:
 
     #!python
-    In [1](1): import sqlalchemy as sa
+    In [1]: import sqlalchemy as sa
     
-    In [2](2): from OrmObject import OrmObject
+    In [2]: from OrmObject import OrmObject
     
-    In [3](3): metadata = sa.MetaData()
+    In [3]: metadata = sa.MetaData()
     
-    In [4](4): user_table = sa.Table('users', metadata,
+    In [4]: user_table = sa.Table('users', metadata,
        ...:     sa.Column('user_id', sa.Integer, primary_key=True),
        ...:     sa.Column('username', sa.Unicode(32), nullable=False),
        ...:     sa.Column('description', sa.Unicode(1000), nullable=False, default=''),
        ...: )
     
-    In [5](5): class User(OrmObject): pass
+    In [5]: class User(OrmObject): pass
        ...:
     
-    In [6](6): import sqlalchemy.orm
+    In [6]: import sqlalchemy.orm
     
-    In [7](7): sa.orm.mapper(User, user_table)
-    Out[7](7): <sqlalchemy.orm.mapper.Mapper object at 0x85c722c>
+    In [7]: sa.orm.mapper(User, user_table)
+    Out[7]: <sqlalchemy.orm.mapper.Mapper object at 0x85c722c>
     
-    In [8](8): u = User(username='Jack')
+    In [8]: u = User(username='Jack')
     
-    In [9](9): print u
+    In [9]: print u
     User(username='Jack')
     
-    In [10](10): u = User(username='Sam', description='Great Guy')
+    In [10]: u = User(username='Sam', description='Great Guy')
     
-    In [11](11): print u
+    In [11]: print u
     User(username='Sam', description='Great Guy')
     
-    In [12](12): u.description = ''
+    In [12]: u.description = ''
     
-    In [13](13): print u
+    In [13]: print u
     User(username='Sam')
     
-    In [14](14): u = User(bogus='Nascar Fan')
+    In [14]: u = User(bogus='Nascar Fan')
     ---------------------------------------------------------------------------
     <type 'exceptions.AttributeError'>        Traceback (most recent call last)
     ... ...
         def __init__(self, **kw):
             for key in kw:
                 if key in self.c:
-                    setattr(self, key, kw[key](key))
+                    setattr(self, key, kw[key])
                 else:
                     raise AttributeError('Cannot set attribute which is' +
                                          'not column in mapped table: %s' % (key,))
         
         def __repr__(self):
-            atts = [       for key in self.c.keys():
+            atts = []
+            for key in self.c.keys():
                 if key in self.__dict__:
                     if not (hasattr(self.c.get(key).default, 'arg') and
                             getattr(self.c.get(key).default, 'arg') == getattr(self, key)):
                         atts.append( (key, getattr(self, key)) )
     
-            return self.__class__.__name__ + '(' + ', '.join(x[0](]
-    ) + '=' + repr(x[1](1)) for x in atts) + ')'
+            return self.__class__.__name__ + '(' + ', '.join(x[0] + '=' + repr(x[1]) for x in atts) + ')'
 
 
 SQLAlchemy 0.5 Implementation:
         def __init__(self, **kw):
             for key in kw:
                 if not key.startswith('_') and key in self.__dict__:
-                    setattr(self, key, kw[key](key))
+                    setattr(self, key, kw[key])
     
         def __repr__(self):
-            attrs = [       for key in self.__dict__:
+            attrs = []
+            for key in self.__dict__:
                 if not key.startswith('_'):
                     attrs.append((key, getattr(self, key)))
-            return self.__class__.__name__ + '(' + ', '.join(x[0](]
-    ) + '=' +
-                                                repr(x[1](1)) for x in attrs) + ')'
+            return self.__class__.__name__ + '(' + ', '.join(x[0] + '=' +
+                                                repr(x[1]) for x in attrs) + ')'
 
 -- Contributed by ltbarcly (Justin Van Winkle)
 
     Base = sa_declarative.declarative_base(name='Base', bind=engine)
     
     def base_repr(self):
-        attrs = [   for key in self.__dict__:
+        attrs = []
+        for key in self.__dict__:
             if not key.startswith('_'):
                 attrs.append((key, getattr(self, key)))
-            return self.__class__.__name__ + '(' + ', '.join(x[0](]
-    ) + '=' +
-                                                             repr(x[1](1)) for x in attrs) + ')'
+            return self.__class__.__name__ + '(' + ', '.join(x[0] + '=' +
+                                                             repr(x[1]) for x in attrs) + ')'
     
     Base.__repr__ = base_repr
 

File UsageRecipes/GlobalFilter.md

     
     session = sessionmaker()()
     
-    c1, c2, c3, c4, c5 = [   Child(timestamp=datetime.datetime(2009, 10, 15, 12, 00, 00)),
+    c1, c2, c3, c4, c5 = [
+        Child(timestamp=datetime.datetime(2009, 10, 15, 12, 00, 00)),
         Child(timestamp=datetime.datetime(2009, 10, 17, 12, 00, 00)),
         Child(timestamp=datetime.datetime(2009, 10, 20, 12, 00, 00)),
         Child(timestamp=datetime.datetime(2009, 10, 12, 12, 00, 00)),
         Child(timestamp=datetime.datetime(2009, 10, 17, 12, 00, 00)),
-    ](
-    )
+    ]
     
-    p1, p2 = [   timestamp=datetime.datetime(2009, 10, 15, 12, 00, 00),
-        children=[c1, c2, c3](
+    p1, p2 = [
     Parent(
-    )
+        timestamp=datetime.datetime(2009, 10, 15, 12, 00, 00),
+        children=[c1, c2, c3]
     ),
     Parent(
         timestamp=datetime.datetime(2009, 10, 17, 12, 00, 00),
-        children=[c5](c4,)
+        children=[c4, c5]
     )]
     
-    session.add_all([p2](p1,))
+    session.add_all([p1, p2])
     session.commit()
 
 
             options(eagerload(Parent.temporal_children)).\
             all()
     
-    assert parents[0](0) == p2
-    assert parents[0](0).temporal_children == [c5](c5)
+    assert parents[0] == p2
+    assert parents[0].temporal_children == [c5]
 
 
 The above pattern will apply the daterange wherever it's specified in the query, including when filtering is performed on `Parent.temporal_children`.
             ).all()
     
     
-    assert parents[0](0) == p2
-    assert parents[0](0).temporal_children == [c5](c5)
+    assert parents[0] == p2
+    assert parents[0].temporal_children == [c5]
     
     session.expire_all()
     
             all()
     
     
-    assert parents[0](0) == p2
-    assert parents[0](0).temporal_children == [c5](c5)
+    assert parents[0] == p2
+    assert parents[0].temporal_children == [c5]
     
     session.expire_all()
     
             all()
     
     
-    assert parents[0](0) == p1
-    assert parents[0](0).temporal_children == [c2](c1,)
+    assert parents[0] == p1
+    assert parents[0].temporal_children == [c1, c2]
     
 
 
 
 
     #!python
-    session.expire(parents[0](0), ['temporal_children']('temporal_children'))
-    assert parents[0](0).temporal_children == [c2](c1,)
+    session.expire(parents[0], ['temporal_children'])
+    assert parents[0].temporal_children == [c1, c2]
     
-    session.expire(parents[0](0))
-    assert parents[0](0).temporal_children == [c2](c1,)
+    session.expire(parents[0])
+    assert parents[0].temporal_children == [c1, c2]
 
 
 There's no public API at the moment to clear out these options.   For new criteria, you need to start again with a new `Parent` instance:
             datetime.datetime(2009, 10, 15, 12, 00, 00), 
             datetime.datetime(2009, 10, 21, 12, 00, 00)
     )).filter(Parent.id==p1.id).first()
-    assert p1.temporal_children == [c2, c3](c1,)
+    assert p1.temporal_children == [c1, c2, c3]

File UsageRecipes/IteratePropsInCreationOrder.md

         # that of each Column.  So we need to look at the actual
         # Column object.
         if isinstance(prop, ColumnProperty):
-            return prop.columns[0](0)._creation_order
+            return prop.columns[0]._creation_order
         else:
             return prop._creation_order
     props.sort(key=_order_for_prop)
     
-    print [for p in props](p.key)
+    print [p.key for p in props]

File UsageRecipes/ManyToManyOrphan.md

     r3 = Entry()
     t1, t2, t3, t4 = Tag("t1"), Tag("t2"), Tag("t3"), Tag("t4")
     
-    r1.tags.extend([t2](t1,))
-    r2.tags.extend([t3](t2,))
-    r3.tags.extend([t4](t4))
-    s.add_all([r2, r3](r1,))
+    r1.tags.extend([t1, t2])
+    r2.tags.extend([t2, t3])
+    r3.tags.extend([t4])
+    s.add_all([r1, r2, r3])
     
     assert s.query(Tag).count() == 4
     
         # in 0.7 use attributes.get_state_history(state, 'tags').deleted
         if state.attrs.tags.history.deleted:
             sess_key = state.session_id
-            ctx['orphaned_tags')]((sess_key,) = True
+            ctx[(sess_key, 'orphaned_tags')] = True
     
     @event.listens_for(Entry, 'after_delete', raw=True)
     def _del_entry(mapper, connection, state):
         sess_key = state.session_id
-        ctx['orphaned_tags')]((sess_key,) = True
+        ctx[(sess_key, 'orphaned_tags')] = True
     
     
     @event.listens_for(Session, 'after_flush')
             session.query(Tag).\
                 filter(~Tag.entries.any()).\
                 delete(synchronize_session=False)
-            del ctx['orphaned_tags')]((session.hash_key,)
+            del ctx[(session.hash_key, 'orphaned_tags')]
     
     
     

File UsageRecipes/NamingConventions.md

                             )
     
         """
-        kw['primary_key']('primary_key') = True
+        kw['primary_key'] = True
         c = Column(Integer, **kw)
     
         @event.listens_for(c, "before_parent_attach")
                             )
         """
     
-        kw['doc']('doc') = "Foreign key referencing %s.%s_id" % (tablename, tablename)
+        kw['doc'] = "Foreign key referencing %s.%s_id" % (tablename, tablename)
         use_alter = kw.pop('use_alter', False)
         return Column("%s_id" % tablename,
                             Integer, 
     def _fk_constraint_name(const, table):
         if const.name:
             return
-        fk = const.elements[0](0)
+        fk = const.elements[0]
         reftable, refcol = fk.target_fullname.split(".")
         const.name = "fk_%s_%s_%s" % (
                                     table.name,
     def _unique_constraint_name(const, table):
         const.name = "uq_%s_%s" % (
             table.name,
-            list(const.columns)[0](0).name
+            list(const.columns)[0].name
         )
     
     @event.listens_for(CheckConstraint, "after_parent_attach")

File UsageRecipes/PGArrayIndex.md

     def compile_indexed(element, compiler, **kw):
         ret = compiler.process(element.expr, **kw)
         if element.index is not None:
-           ret = "%s[%d](%d)" % (ret, element.index)
+           ret = "%s[%d]" % (ret, element.index)
         return ret
         
     if __name__ == '__main__':
     
         expr1 = as_indexed(test.c.mydata)
         assert isinstance(expr1.type, Float)
-        expr2 = expr1[1](1)
+        expr2 = expr1[1]
         assert isinstance(expr2.type, Float)
-        expr3 = expr2[1](1)
+        expr3 = expr2[1]
         assert isinstance(expr3.type, Float)
         
         pg_dialect = dialect()
-        print select([               test.c.mykey, 
-                    func.sum(as_indexed(test.c.mydata)[1](
-    )[1](1))
+        print select([
+                    test.c.mykey, 
+                    func.sum(as_indexed(test.c.mydata)[1][1])
                 ]).group_by(test.c.mykey).\
                 compile(dialect=pg_dialect)
     
-        print select([               test.c.mykey, 
-                    as_indexed(test.c.mydata)[1](
-    ) + 35.5
+        print select([
+                    test.c.mykey, 
+                    as_indexed(test.c.mydata)[1] + 35.5
                 ]).\
                 compile(dialect=pg_dialect)

File UsageRecipes/PGValues.md

     
         def _populate_column_collection(self):
             self._columns.update(
-                [% i, column("column%d" % i))
-                        for i in xrange(1, len(self.list[0](("column%d")) + 1)]
+                [("column%d" % i, column("column%d" % i))
+                        for i in xrange(1, len(self.list[0]) + 1)]
             )
     
     @compiles(values)
     if __name__ == '__main__':
         t1 = table('t1', column('a'), column('b'))
         t2 = values((1, 0.5), (2, -0.5)).alias('weights')
-        print select([t2](t1,)).select_from(t1.join(t2, t1.c.a==t2.c.column2))
+        print select([t1, t2]).select_from(t1.join(t2, t1.c.a==t2.c.column2))
 
 
 renders:

File UsageRecipes/PartitionTable.md

             Column('data', String(50))
         )
         
-        print select([t1](t1)).where(t1.c.data == 'foo')
+        print select([t1]).where(t1.c.data == 'foo')
     
         print
         
         t1_partition_a = Partition(t1, "partition_a")
-        print select([t1_partition_a](t1_partition_a)).where(t1_partition_a.c.data=='foo')
+        print select([t1_partition_a]).where(t1_partition_a.c.data=='foo')
         
         print
         
         t1_p_alias = t1_partition_a.alias()
-        print select([t1_p_alias](t1_p_alias)).where(t1_p_alias.c.data=='foo')
+        print select([t1_p_alias]).where(t1_p_alias.c.data=='foo')
         
 
 

File UsageRecipes/PgsqlRecursive.md

 
 
     #!python
-    In [1](1): from sqlalchemy import select
+    In [1]: from sqlalchemy import select
     
-    In [2](2): import hierarchy as hie
+    In [2]: import hierarchy as hie
     
-    In [3](3): x = hie.Hierarchy(model.category, select([model.category.c.name](model.category.c.id,)))
+    In [3]: x = hie.Hierarchy(model.category, select([model.category.c.id, model.category.c.name]))
     
-    In [4](4): Session.execute(x).fetchall()
-    Out[4](4): 
-    [u'Technology', 1, [1]((1,), False), # third column tells us the level of the row
-     (2, u'Languages', 2, [2](1,), False), # fourth column give us a list with all the nodes from the root to the present id
-     (3, u'Python', 3, [2, 3](1,), True), # fifth column tells us if the current row is a leaf node or not
-     (4, u'Java', 3, [2, 4](1,), True),
-     (5, u'PHP', 3, [2, 5](1,), True),
-     (7, u'Databases', 2, [7](1,), False),
-     (6, u'SQLite', 3, [7, 6](1,), True),
-     (8, u'MySQL', 3, [7, 8](1,), True),
-     (9, u'PostgreSQL', 3, [7, 9](1,), True)]
+    In [4]: Session.execute(x).fetchall()
+    Out[4]: 
+    [(1, u'Technology', 1, [1], False), # third column tells us the level of the row
+     (2, u'Languages', 2, [1, 2], False), # fourth column give us a list with all the nodes from the root to the present id
+     (3, u'Python', 3, [1, 2, 3], True), # fifth column tells us if the current row is a leaf node or not
+     (4, u'Java', 3, [1, 2, 4], True),
+     (5, u'PHP', 3, [1, 2, 5], True),
+     (7, u'Databases', 2, [1, 7], False),
+     (6, u'SQLite', 3, [1, 7, 6], True),
+     (8, u'MySQL', 3, [1, 7, 8], True),
+     (9, u'PostgreSQL', 3, [1, 7, 9], True)]
     
-    In [5](5): rs = Session.execute(x).fetchall()
+    In [5]: rs = Session.execute(x).fetchall()
     
-    In [6](6): for ev in rs:
+    In [6]: for ev in rs:
                if ev.level == 1:
                   print(ev.id, ev.name)
                else:

File UsageRecipes/PolymorphicOnAnotherTable.md

         id = Column(Integer, primary_key=True)
         data = Column(String)
         type_id = Column(ForeignKey('atype.id'))
-        type_name = column_property(select([AType.name](AType.name)).where(AType.id == id))
+        type_name = column_property(select([AType.name]).where(AType.id == id))
         type = relationship(AType)
     
         __mapper_args__ = {
     
     a_type, asub_type = AType(name="a"), AType(name="asub")
     
-    sess.add_all([   A(data='a1', type=a_type),
+    sess.add_all([
+        A(data='a1', type=a_type),
         ASub(data='asub1', type=asub_type),
         ASub(data='asub2', type=asub_type),
         A(data='a2', type=a_type),
-    ](
-    ))
+    ])
     sess.commit()
     
     sess = Session(e)

File UsageRecipes/PostgreSQLInheritance.md

     
     s = Session(e)
     
-    s.add_all([   Child1(data='c1'),
+    s.add_all([
+        Child1(data='c1'),
         Child2(data='c2'),
         Child1(data='c3'),
         Child2(data='c4'),
         Child1(data='c5'),
-    ](
-    ))
+    ])
     s.commit()
     s.close()
     

File UsageRecipes/PreFilteredQuery.md

             
         sess = Session()
     
-        sess.add_all([       User(name='u1', public=True, addresses=[Address(email='u1a1', public=True), Address(email='u1a2', public=True)](
-    )),
-            User(name='u2', public=True, addresses=[public=False), Address(email='u2a2', public=True)](Address(email='u2a1',)),
-            User(name='u3', public=False, addresses=[public=False), Address(email='u3a2', public=False)](Address(email='u3a1',)),
-            User(name='u4', public=False, addresses=[public=False), Address(email='u4a2', public=True)](Address(email='u4a1',)),
-            User(name='u5', public=True, addresses=[public=True), Address(email='u5a2', public=False)](Address(email='u5a1',))
+        sess.add_all([
+            User(name='u1', public=True, addresses=[Address(email='u1a1', public=True), Address(email='u1a2', public=True)]),
+            User(name='u2', public=True, addresses=[Address(email='u2a1', public=False), Address(email='u2a2', public=True)]),
+            User(name='u3', public=False, addresses=[Address(email='u3a1', public=False), Address(email='u3a2', public=False)]),
+            User(name='u4', public=False, addresses=[Address(email='u4a1', public=False), Address(email='u4a2', public=True)]),
+            User(name='u5', public=True, addresses=[Address(email='u5a1', public=True), Address(email='u5a2', public=False)])
         ])
     
         sess.commit()
     
-        entries = [   for ad in sess.query(Address):
+        entries = []
+        for ad in sess.query(Address):
             assert ad.public
             user = ad.user
             if user:
             else:
                 entries.append((ad.email, "none"))
         
-        assert entries == [(u'u1a1', u'u1'), (u'u1a2', u'u1'), (u'u2a2', u'u2'), (u'u4a2', 'none'), (u'u5a1', u'u5')](]
-    )
+        assert entries == [(u'u1a1', u'u1'), (u'u1a2', u'u1'), (u'u2a2', u'u2'), (u'u4a2', 'none'), (u'u5a1', u'u5')]
     
         a1 = sess.query(Address).filter_by(email='u1a1').one()
         a1.user.public = False

File UsageRecipes/Profiling.md

      
         conn.execute(
             t1.insert(), 
-            [%d" % x} for x in xrange(100000)]({"data":"entry)
+            [{"data":"entry %d" % x} for x in xrange(100000)]
         )
      
         conn.execute(
      
         conn.execute(
             t1.insert(), 
-            [%d" % x} for x in xrange(100000)]({"data":"entry)
+            [{"data":"entry %d" % x} for x in xrange(100000)]
         )
      
         conn.execute(

File UsageRecipes/Py2exePy2app.md

 ##### setup.py
 
     ...
-    "packages": ["sqlalchemy.databases.sqlite"]("sqlalchemy.databases.sqlite")
+    "packages": ["sqlalchemy.databases.sqlite"]
     ...
 
 
 
 ##### setup.py
 
-    includes=["sqlalchemy.databases.sqlite"]("sqlalchemy.databases.sqlite") 
+    includes=["sqlalchemy.databases.sqlite"] 
 
 
 ##### Command Line

File UsageRecipes/QuickHybrid.md

                 super(decorate, self).__init__(fget, *arg, **kw)
     
                 assert fget.__name__.startswith("_")
-                self.attrname = fget.__name__[1:](1:)
+                self.attrname = fget.__name__[1:]
                 self.colname = colname = fget.__name__
     
                 @hybrid_property

File UsageRecipes/RelationshipOnCast.md

     # argument can be used to give it the push it needs.
     B.a = relationship(A, 
                 primaryjoin=cast(A.a_id, Integer)==B.a_id, 
-                _local_remote_pairs=[A.__table__.c.a_id)]((B.__table__.c.a_id,),
+                _local_remote_pairs=[(B.__table__.c.a_id, A.__table__.c.a_id)],
                 foreign_keys=B.a_id, 
     
                 # _local_remote_pairs doesn't automatically propagate to 
                 # the backref, so need it here as well.
                 backref=backref("bs", 
-                            _local_remote_pairs=[                           (A.__table__.c.a_id, B.__table__.c.a_id)
-                        ](
-    ))
+                            _local_remote_pairs=[
+                                (A.__table__.c.a_id, B.__table__.c.a_id)
+                        ])
             )
     
     # we demonstrate with SQLite, but the important part
     
     s = Session(e)
     
-    s.add_all([   A(a_id="1"),
-        A(a_id="2", bs=[B(), B()](
-    )),
-        A(a_id="3", bs=[B()](B())),
+    s.add_all([
+        A(a_id="1"),
+        A(a_id="2", bs=[B(), B()]),
+        A(a_id="3", bs=[B()]),
     ])
     s.commit()
     

File UsageRecipes/RelationshipToLatest.md

         latest_b = relationship(lambda: B, 
                         primaryjoin=lambda: and_(
                                 A.id==B.a_id, 
-                                B.date==select([func.max(B.date)](func.max(B.date))).
+                                B.date==select([func.max(B.date)]).
                                           where(B.a_id==A.id).
                                           correlate(A.__table__)
                             )
     Base.metadata.create_all(e)
     s = Session(e)
     
-    s.add_all([   A(bs=[
+    s.add_all([
+        A(bs=[
                 B(date=datetime.date(2011, 10, 5)),
                 B(date=datetime.date(2011, 8, 4)),
                 B(date=datetime.date(2011, 9, 17)),
-            ](
-    )),
-        A(bs=[           B(date=datetime.date(2011, 10, 5)),
+            ]),
+        A(bs=[
+                B(date=datetime.date(2011, 10, 5)),
                 B(date=datetime.date(2011, 8, 4)),
                 B(date=datetime.date(2011, 9, 17)),
-            ](
-    )),
+            ]),
     ])
     s.commit()
     

File UsageRecipes/SafeCounterColumns.md

     customers = Table('customers', metadata,
         Column('cust_id', Integer, primary_key=True),
         Column('name', String(40)),
-        [...](...)
+        [...]
         Column('next_order_id', Integer, default=1))
     
     CREATE TABLE customers (
       cust_id INTEGER NOT NULL,
       cust_name VARCHAR(40),
-      [...](...)
+      [...]
       next_order_id INTEGER,
       PRIMARY KEY (cust_id))
 
 In SQL, you can prevent this by selecting the row "for UPDATE".  This locks just that row until the row is next updated:
 
 
-    SELECT next_order_id FROM customers WHERE [...](...) FOR UPDATE
+    SELECT next_order_id FROM customers WHERE [...] FOR UPDATE
     -- Now the row is locked
     UPDATE customers SET next_order_id = next_order_id + 1 WHERE cust_id = xx
     -- Now the row is unlocked

File UsageRecipes/SchemaDisplay.md

     from sqlalchemy.orm import class_mapper
     
     # lets find all the mappers in our model
-    mappers = [attr in dir(model):
-        if attr[0](]
-    for) == '_': continue
+    mappers = []
+    for attr in dir(model):
+        if attr[0] == '_': continue
         try:
             cls = getattr(model, attr)
             mappers.append(class_mapper(cls))

File UsageRecipes/SelectAsLabel.md

 you can do:
 
 
-    select([tbl.c.foo.label('bar')](tbl.c.foo.label('bar')))
+    select([tbl.c.foo.label('bar')])
 

File UsageRecipes/SelectInto.md

             column('x3')
         )
     
-        print SelectInto([marker.c.x2](marker.c.x1,), "tmp_markers").\
+        print SelectInto([marker.c.x1, marker.c.x2], "tmp_markers").\
                 where(marker.c.x3==5).\
-                where(marker.c.x1.in_([5](1,)))
+                where(marker.c.x1.in_([1, 5]))

File UsageRecipes/SelectIntoOutfile.md

         )
     
     
-    e = SelectIntoOutfile(select([s.dim_date_table](s.dim_date_table)).where(s.dim_date_table.c.Year==2009), '/tmp/test.txt')
+    e = SelectIntoOutfile(select([s.dim_date_table]).where(s.dim_date_table.c.Year==2009), '/tmp/test.txt')
     print e
     eng.execute(e)
     

File UsageRecipes/SelectUpperLike.md

             {'pk':4, 'info':'pk_4_data'},
             {'pk':5, 'info':'pk_5_data'})
     
-    select([info_table](info_table), func.upper(info_table.c.info).like('%3_DAT%')
+    select([info_table], func.upper(info_table.c.info).like('%3_DAT%'))
     
-    # Prints: [u'pk_3_data')]((3,)
+    # Prints: [(3, u'pk_3_data')]
     print repr(sas_result)

File UsageRecipes/SessionIndexing.md

             # get a dictionary for this session
             if session not in self._by_session:
                 # per session we store a dictionary of sets
-                self._by_session[session](session) = by_session = \
+                self._by_session[session] = by_session = \
                             collections.defaultdict(weakref.WeakSet)
             else:
-                by_session = self._by_session[session](session)
+                by_session = self._by_session[session]
     
             # find all the indexes for this object's class,
             # and superclasses too.
             for cls in typ.__mro__:
                 if cls in self._index_fns:
                     # all the "index" functions for this class
-                    for name, rec in self._index_fns[cls](cls).items():
-                        if rec['include_subclasses']('include_subclasses') or cls is rec['cls']('cls'):
+                    for name, rec in self._index_fns[cls].items():
+                        if rec['include_subclasses'] or cls is rec['cls']:
                             # call the indexing function, build a key
-                            key = name, rec['fn']('fn')(instance)
-                            by_session[key](key).add(instance)
+                            key = name, rec['fn'](instance)
+                            by_session[key].add(instance)
     
         def indexed(self, cls, name, include_subclasses=True):
             """Log a function as indexing a certain class."""
     
             if cls not in self._index_fns:
-                self._index_fns[cls](cls) = byclass = {}
+                self._index_fns[cls] = byclass = {}
             else:
-                byclass = self._index_fns[cls](cls)
+                byclass = self._index_fns[cls]
             def decorate(fn):
-                byclass[name](name) = {
+                byclass[name] = {
                     "fn": fn,
                     "cls": cls,
                     "include_subclasses": include_subclasses
                 if by_session is None:
                     return set()
                 key = name, value
-                return set(by_session[key](key)).intersection(
+                return set(by_session[key]).intersection(
                             set(sess.identity_map.values()).union(sess.new))
             return go
     
     
         s1, s2, s3 = Session(), Session(), Session()
     
-        s1.add_all([b1, b2, d2, e3, ad_c](a1,))
-        s2.add_all([c2, e1, e2, ad_a](a2,))
-        s3.add_all([c1, d1, d3, ad_b](b3,))
+        s1.add_all([a1, b1, b2, d2, e3, ad_c])
+        s2.add_all([a2, c2, e1, e2, ad_a])
+        s3.add_all([b3, c1, d1, d3, ad_b])
     
-        assert indexes.user_byname(s1, "b") == set([b2](b1,))
-        assert indexes.user_byname(s2, "e") == set([e2](e1,))
-        assert indexes.user_byname(s2, "c") == set([c2](c2))
-        assert indexes.user_byname(s3, "b") == set([b3](b3))
-        assert indexes.address_byname(s3, "b") == set([ad_b](ad_b))
+        assert indexes.user_byname(s1, "b") == set([b1, b2])
+        assert indexes.user_byname(s2, "e") == set([e1, e2])
+        assert indexes.user_byname(s2, "c") == set([c2])
+        assert indexes.user_byname(s3, "b") == set([b3])
+        assert indexes.address_byname(s3, "b") == set([ad_b])
         assert indexes.address_byname(s3, "c") == set()
-        assert indexes.address_byname(s1, "c") == set([ad_c](ad_c))
+        assert indexes.address_byname(s1, "c") == set([ad_c])
     
         s2.expunge(e2)
-        assert indexes.user_byname(s2, "e") == set([e1](e1))
+        assert indexes.user_byname(s2, "e") == set([e1])
         s2.close()
         assert indexes.user_byname(s2, "e") == set([])
     

File UsageRecipes/SessionModifiedSQL.md

         DBAPI connection.
     
         """
-        connection.info['session_info']('session_info') = session.info
+        connection.info['session_info'] = session.info
     
     @contextmanager
     def session_comment(session, comment):
         """Apply the given comment to all SQL emitted by the given Session.
         """
-        session.info["comment"]("comment") = comment
+        session.info["comment"] = comment
         yield
-        del session.info["comment"]("comment")
+        del session.info["comment"]
     
     @contextmanager
     def session_shardid(session, shardid):
         """Apply the "shard" id to all SQL emitted by the given Session.
         """
-        session.info["shardid"]("shardid") = shardid
+        session.info["shardid"] = shardid
         yield
-        del session.info["shardid"]("shardid")
+        del session.info["shardid"]
     
     
     @event.listens_for(Engine, "before_cursor_execute", retval=True)
         """
         session_info = connection.info.get('session_info', {})
         if "comment" in session_info:
-            statement = statement + " -- %s" % session_info["comment"]("comment")
+            statement = statement + " -- %s" % session_info["comment"]
         return statement, parameters
     
     @event.listens_for(Engine, "before_cursor_execute", retval=True)
         """
         session_info = connection.info.get('session_info', {})
         if "shardid" in session_info:
-            statement = statement.replace("_shardid_", session_info["shardid"]("shardid"))
+            statement = statement.replace("_shardid_", session_info["shardid"])
         return statement, parameters
     
     
     
         e = create_engine("sqlite://")
     
-        Base.metadata.create_all(e, tables=[A.__table__](A.__table__))
+        Base.metadata.create_all(e, tables=[A.__table__])
     
         s = Session(e)
     
         with session_shardid(s, "s1"):
-            Base.metadata.create_all(s.connection(), tables=[B.__table__](B.__table__))
+            Base.metadata.create_all(s.connection(), tables=[B.__table__])
     
         with session_shardid(s, "s2"):
-            Base.metadata.create_all(s.connection(), tables=[B.__table__](B.__table__))
+            Base.metadata.create_all(s.connection(), tables=[B.__table__])
     
         s.add(A(data='d1'))
         s.commit()

File UsageRecipes/StringComparisonFilter.md

                     like_op, notlike_op, contains_op, startswith_op, endswith_op
     
     class LowerCaseString(String):
-        case_sensitive = set([ne, gt, lt, ge, le,
+        case_sensitive = set([eq, ne, gt, lt, ge, le,
                             between_op, like_op, notlike_op,
-                            contains_op, startswith_op, endswith_op](eq,))
+                            contains_op, startswith_op, endswith_op])
         class Comparator(String.Comparator):
     
             def operate(self, op, *other, **kw):
                 if op in LowerCaseString.case_sensitive:
-                    other = [for o in other](func.lower(other))
+                    other = [func.lower(o) for o in other]
                     self = func.lower(self.expr)
                     return op(self, *other, **kw)
                 else:
     
     t = Table('t', MetaData(), Column('x', LowerCaseString))
     
-    for expr in [   t.c.x == "hi",
+    for expr in [
+        t.c.x == "hi",
         "hi" == t.c.x,
         t.c.x == t.alias().c.x,
         t.c.x + "hi",
         t.c.x.like("hi"),
         t.c.x.contains("hi"),
-    ](
-    ):
+    ]:
         print (expr)

File UsageRecipes/UniqueConstraintCollection.md

     
         # flush *just this one item*.  This is a special
         # feature of flush, not for general use.
-        sess.flush([item](item))
+        sess.flush([item])
     
     e = create_engine("sqlite://", echo=True)
     Base.metadata.create_all(e)
     
     s = Session(e)
     
-    p1 = Parent(children=[   Child(name='c1'),
+    p1 = Parent(children=[
+        Child(name='c1'),
         Child(name='c2')
-    ](
-    ))
+    ])
     s.add(p1)
     s.commit()
     
-    p1.children = [   Child(name='c2'),
+    p1.children = [
+        Child(name='c2'),
         Child(name='c3')
-    ](
-    )
+    ]
     s.commit()
     

File UsageRecipes/UniqueConstraintExceptionHandling.md

     except sqlalchemy.exc.IntegrityError, exc:
             reason = exc.message
             if reason.endswith('is not unique'):
-                    print "%s already exists" % exc.params[0](0)
+                    print "%s already exists" % exc.params[0]
                     sess.rollback()

File UsageRecipes/UniqueObject.md

     
         key = (cls, hashfunc(*arg, **kw))
         if key in cache:
-            return cache[key](key)
+            return cache[key]
         else:
             with session.no_autoflush:
                 q = session.query(cls)
                 if not obj:
                     obj = constructor(*arg, **kw)
                     session.add(obj)
-            cache[key](key) = obj
+            cache[key] = obj
             return obj
 
 

File UsageRecipes/ValidateAllOccurrencesOfType.md

         """
         for prop in mapper.iterate_properties:
             if hasattr(prop, 'columns'):
-                if isinstance(prop.columns[0](0).type, StoresMySpecialType):
+                if isinstance(prop.columns[0].type, StoresMySpecialType):
                     event.listen(
                         getattr(class_, prop.key), 
                         "set", 
             id = Column(Integer, primary_key=True)
             data = Column(StoresMySpecialType)
     
-        somethings = [       Something(data="who"),
+        somethings = [
+            Something(data="who"),
             Something(data="is"),
             Something(data="the prettiest!")
-        ](
-    )
+        ]
     
         # the data is coerced to the special type
         # upon set.

File UsageRecipes/ValidateOnType.md

             return
         @event.listens_for(inst, "set", retval=True)
         def set_(instance, value, oldvalue, initiator):
-            validator = validators.get(inst.property.columns[0](0).type.__class__)
+            validator = validators.get(inst.property.columns[0].type.__class__)
             if validator:
                 return validator(value)
             else:

File UsageRecipes/VersionedMap.md

             # new elements in the "added" group
             # are moved to our new collection.
             for elem in hist.added:
-                self.elements[elem.name](elem.name) = elem
+                self.elements[elem.name] = elem
     
             # copy elements in the 'unchanged' group.
             # the new ones associate with the new ConfigData,
             # the old ones stay associated with the old ConfigData
             for elem in hist.unchanged:
-                self.elements[elem.name](elem.name) = ConfigValueAssociation(elem.config_value)
+                self.elements[elem.name] = ConfigValueAssociation(elem.config_value)
             
             # we also need to expire changes on each ConfigValueAssociation
             # that is to remain associated with the old ConfigData.
             
             """
             if value != self.config_value.value:
-                self.config_data.elements[self.name](self.name) = \
+                self.config_data.elements[self.name] = \
                         ConfigValueAssociation(
                             ConfigValue(self.config_value.name, value)
                         )
         sess.commit()
         version_one = config.id
         
-        config.data['user_name']('user_name') = 'yahoo'
+        config.data['user_name'] = 'yahoo'
         sess.commit()
         
         version_two = config.id
                 order_by(ConfigValue.originating_config_id).\
                 all()
         
-        assert [h.originating_config_id) for h in history]((h.value,) == \
-                [version_one), ('yahoo', version_two)](('twitter',)
+        assert [(h.value, h.originating_config_id) for h in history] == \
+                [('twitter', version_one), ('yahoo', version_two)]
         

File UsageRecipes/VersionedRows.md

     
     engine = create_engine('sqlite://', echo=True)
     
-    Session = sessionmaker(engine, extension=[VersionExtension()](VersionExtension()))
+    Session = sessionmaker(engine, extension=[VersionExtension()])
     
     # example 1, simple versioning
     
     session.commit()
     
     assert session.query(Example.id, Example.data).order_by(Example.id).all() == \
-            ['e1'), (2, 'e2')]((1,)
+            [(1, 'e1'), (2, 'e2')]
     
     # example 2, versioning with a parent
         
     
         def new_version(self, session):
             # expire parent's reference to us
-            session.expire(self.parent, ['child']('child'))
+            session.expire(self.parent, ['child'])
             
             # create new version
             Versioned.new_version(self, session)
     
     assert p1.child_id == 2
     assert session.query(Child.id, Child.data).order_by(Child.id).all() == \
-        ['c1'), (2, 'c2')]((1,)
+        [(1, 'c1'), (2, 'c2')]

File UsageRecipes/Views.md

         # is undefined and this breaks this demo as the query cannot find the column 'stuff_view.data'
         # See http://www.sqlite.org/c3ref/column_name.html
         stuff_view = view("stuff_view", metadata, 
-                        select([stuff.c.data.label('data'), more_stuff.c.data.label('moredata')](stuff.c.id.label('id'),)).\
+                        select([stuff.c.id.label('id'), stuff.c.data.label('data'), more_stuff.c.data.label('moredata')]).\
                         select_from(stuff.join(more_stuff)).\
                         where(stuff.c.data.like(text('"%orange%"'))
                         )
                     )
         
         # the ORM would appreciate this
-        assert stuff_view.primary_key == [stuff_view.c.id](stuff_view.c.id)
+        assert stuff_view.primary_key == [stuff_view.c.id]
         
         metadata.create_all()
         
         )
         
         assert set(
-                r[0:2](0:2) for r in engine.execute(select([stuff_view.c.moredata](stuff_view.c.data,))).fetchall()
-            ) == set(['foobar'), ('orange julius', 'foobar')](('oranges',))
+                r[0:2] for r in engine.execute(select([stuff_view.c.data, stuff_view.c.moredata])).fetchall()
+            ) == set([('oranges', 'foobar'), ('orange julius', 'foobar')])
         
         metadata.drop_all()

File UsageRecipes/WindowFunctionsByDefault.md

                                         sql.literal_column("ROW_NUMBER() OVER (ORDER BY %s)" % orderby
                                         ).label("rownum")).order_by(None) 
                 
-                limitselect = sql.select([for c in element.alias().c if c.key != 'rownum'](c))
+                limitselect = sql.select([c for c in element.alias().c if c.key != 'rownum'])
                 limitselect._window_visit = True
                 limitselect._is_wrapper = True
                 
             
                 element = limitselect
         
-        kw['iswrapper']('iswrapper') = getattr(element, '_is_wrapper', False)
+        kw['iswrapper'] = getattr(element, '_is_wrapper', False)
         
         return compiler.visit_select(element, **kw)
     
         
         t1 = table('t1', column('c1'), column('c2'), column('c3'))
         
-        s = select([t1](t1)).order_by(t1.c.c2).limit(3).offset(5)
+        s = select([t1]).order_by(t1.c.c2).limit(3).offset(5)
         print s
     
 
                                         sql.literal_column("ROW_NUMBER() OVER (ORDER BY %s)" % orderby
                                         ).label("rownum")).order_by(None) 
     
-                    limitselect = sql.select([for c in element.alias().c if c.key != 'rownum'](c))
+                    limitselect = sql.select([c for c in element.alias().c if c.key != 'rownum'])
                     limitselect._window_visit = True
                     limitselect._is_wrapper = True
     
     
                     element = limitselect
     
-        kw['iswrapper']('iswrapper') = getattr(element, '_is_wrapper', False)
+        kw['iswrapper'] = getattr(element, '_is_wrapper', False)
     
         return compiler.visit_select(element, **kw)

File UsageRecipes/WindowedRangeQuery.md

         if windowsize > 1:
             q = q.filter("rownum %% %d=1" % windowsize)
     
-        intervals = [for id, in q](id)
+        intervals = [id for id, in q]
     
         while intervals:
             start = intervals.pop(0)
             if intervals:
-                end = intervals[0](0)
+                end = intervals[0]
             else:
                 end = None
             yield int_for_range(start, end)
         Base.metadata.create_all(e)
     
         # get some random list of unique values
-        data = set([1000000) for i in xrange(10000)](random.randint(1,))
+        data = set([random.randint(1, 1000000) for i in xrange(10000)])
     
         s = Session(e)
-        s.add_all([data=j) for i, j in enumerate(data)](Widget(id=i,))
+        s.add_all([Widget(id=i, data=j) for i, j in enumerate(data)])
         s.commit()
     
         q = s.query(Widget)