
Mike Bayer committed 93d5424

- further changes to attributes with regard to "trackparent". the "commit" operation
now sets a "hasparent" flag for all attributes on all objects (see the sketch below). that way
lazy loads via callables get included in trackparent, and eager loads do as well because the
mapper calls commit() on all objects at load time. this is a less shaky method than the "optimistic"
approach in the previous commit, but uses more memory and involves more overhead.
- some tweaks/cleanup to unit tests
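
For reference, here is a minimal, self-contained sketch of the flag bookkeeping described in the first point. The names (TrackedAttribute, commit_all, Node) are illustrative only and are not SQLAlchemy's actual API; the idea is that the flag lives in each instance's state keyed by the attribute, and commit() now marks every loaded child explicitly instead of relying on an "optimistic" default for persisted instances.

    # illustrative sketch, not SQLAlchemy's actual classes
    class TrackedAttribute(object):
        """tracks, per attribute, whether an item is attached to a parent."""
        def sethasparent(self, item, value):
            # the flag is stored per (attribute, item) pair
            if item is not None:
                item._state[('hasparent', id(self))] = value

        def hasparent(self, item):
            # an absent flag now simply means False; there is no longer an
            # "optimistic" default of True for persisted/detached instances
            return item._state.get(('hasparent', id(self)), False)

    def commit_all(attr, children):
        # what the "commit" operation does for a loaded collection:
        # mark every child as having a parent via this attribute
        for child in children:
            attr.sethasparent(child, True)

    class Node(object):
        def __init__(self):
            self._state = {}

    posts = TrackedAttribute()
    post = Node()
    commit_all(posts, [post])      # e.g. after a lazy or eager load
    assert posts.hasparent(post)   # explicit flag, not an optimistic guess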

Comments (0)

Files changed (8)

 - mappers can tell if one of their objects is an "orphan" based
 on interactions with the attribute package. this check is based
 on a status flag maintained for each relationship 
-when objects are attached and detached from each other.  if the
-status flag is not present, its assumed to be "False" for a 
-transient instance and assumed to be "True" for a persisted/detached
- instance.
+when objects are attached and detached from each other.
 - it is now invalid to declare a self-referential relationship with
 "delete-orphan" (as the abovementioned check would make them impossible
 to save)
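
The orphan determination described in the first changelog item can be paraphrased with the following self-contained sketch; FlushError, is_orphan and the _parents/_saved attributes are illustrative stand-ins for the real attribute-package and mapper bookkeeping, not actual SQLAlchemy names.

    class FlushError(Exception):
        pass

    class Child(object):
        def __init__(self):
            self._parents = {}    # relationship key -> "hasparent" flag
            self._saved = False   # stand-in for having an _instance_key

    def is_orphan(obj, delete_orphan_keys):
        for key in delete_orphan_keys:
            if not obj._parents.get(key, False):
                if not obj._saved:
                    # a pending, parentless instance can never be flushed
                    raise FlushError("instance %s is an unsaved, pending "
                                     "instance and is an orphan" % obj)
                return True
        return False

    c = Child()
    c._parents['posts'] = True        # set when appended to a parent collection
    assert not is_orphan(c, ['posts'])

This mirrors the _is_orphan()/has_identity() changes in lib/sqlalchemy/orm/mapper.py below.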

lib/sqlalchemy/attributes.py

         return self.get(obj)
 
     def hasparent(self, item, optimistic=False):
-        """return True if the given item is attached to a parent object 
-        via the attribute represented by this InstrumentedAttribute.
-        
-        optimistic indicates what we should return if the given item has no "hasparent"
-        record at all for the given attribute."""
-        return item._state.get(('hasparent', id(self)), optimistic)
+        """return the boolean value of a "hasparent" flag attached to the given item.
+        """
+        return item._state.get(('hasparent', id(self)), False)
         
     def sethasparent(self, item, value):
         """sets a boolean flag on the given item corresponding to whether or not it is
         attached to a parent object via the attribute represented by this InstrumentedAttribute."""
         if item is not None:
             item._state[('hasparent', id(self))] = value
-
+            
     def get_history(self, obj, passive=False):
         """return a new AttributeHistory object for the given object/this attribute's key.
         
                     values = callable_()
                     l = InstrumentedList(self, obj, self._adapt_list(values), init=False)
                     
-                    # mark loaded instances with "hasparent" status.  commented out
-                    # because loaded objects use "optimistic" parent-checking
-                    #if self.trackparent and values is not None:
-                    #    [self.sethasparent(v, True) for v in values if v is not None]
-                    
                     # if a callable was executed, then its part of the "committed state"
                     # if any, so commit the newly loaded data
                     orig = state.get('original', None)
                     if orig is not None:
                         orig.commit_attribute(self, obj, l)
+                    
                 else:
                     # note that we arent raising AttributeErrors, just creating a new
                     # blank list and setting it.
                     value = callable_()
                     obj.__dict__[self.key] = value
 
-                    # mark loaded instances with "hasparent" status.  commented out
-                    # because loaded objects use "optimistic" parent-checking
-                    #if self.trackparent and value is not None:
-                    #    self.sethasparent(value, True)
-                    
                     # if a callable was executed, then its part of the "committed state"
                     # if any, so commit the newly loaded data
                     orig = state.get('original', None)
         if attr.uselist:
             if value is not False:
                 self.data[attr.key] = [x for x in value]
+                if attr.trackparent:
+                    [attr.sethasparent(x, True) for x in self.data[attr.key]]
             elif obj.__dict__.has_key(attr.key):
                 self.data[attr.key] = [x for x in obj.__dict__[attr.key]]
+                if attr.trackparent:
+                    [attr.sethasparent(x, True) for x in self.data[attr.key]]
         else:
             if value is not False:
                 self.data[attr.key] = value
+                if attr.trackparent:
+                    attr.sethasparent(self.data[attr.key], True)
             elif obj.__dict__.has_key(attr.key):
                 self.data[attr.key] = obj.__dict__[attr.key]
-                        
+                if attr.trackparent:
+                    attr.sethasparent(self.data[attr.key], True)
     def rollback(self, manager, obj):
         for attr in manager.managed_attributes(obj.__class__):
             if self.data.has_key(attr.key):

lib/sqlalchemy/orm/mapper.py

         #self.compile()
     
     def _is_orphan(self, obj):
-        optimistic = hasattr(obj, '_instance_key')
         for (key,klass) in self.delete_orphans:
-            if not getattr(klass, key).hasparent(obj, optimistic=optimistic):
+            if not getattr(klass, key).hasparent(obj):
+                if not has_identity(obj):
+                    raise exceptions.FlushError("instance %s is an unsaved, pending instance and is an orphan" % obj)
                 return True
         else:
             return False
 
         if not postupdate:
             for obj in objects:
-                if not hasattr(obj, "_instance_key"):
+                if not has_identity(obj):
                     self.extension.before_insert(self, connection, obj)
                 else:
                     self.extension.before_update(self, connection, obj)
                 # 'postupdate' means a PropertyLoader is telling us, "yes I know you 
                 # already inserted/updated this row but I need you to UPDATE one more 
                 # time"
-                isinsert = not postupdate and not hasattr(obj, "_instance_key")
+                isinsert = not postupdate and not has_identity(obj)
                 hasdata = False
                 for col in table.columns:
                     if col is self.version_id_col:
     else:
         return repr(obj)
 
+def has_identity(object):
+    return hasattr(object, '_instance_key')
+    
 def has_mapper(object):
     """returns True if the given object has a mapper association"""
     return hasattr(object, '_entity_name')

lib/sqlalchemy/orm/properties.py

 
         if self.cascade.delete_orphan:
             if self.parent.class_ is self.mapper.class_:
-                raise exceptions.ArgumentError("Cant establish 'delete-orphan' cascade rule on a self-referential relationship.  You probably want cascade='all', which includes delete cascading but not orphan detection.")
+                raise exceptions.ArgumentError("Cant establish 'delete-orphan' cascade rule on a self-referential relationship (attribute '%s' on class '%s').  You probably want cascade='all', which includes delete cascading but not orphan detection." %(self.key, self.parent.class_.__name__))
             self.mapper.primary_mapper().delete_orphans.append((self.key, self.parent.class_))
             
         if self.secondaryjoin is not None and self.secondary is None:
                 return None
             else:
                 return mapper.object_mapper(instance).props[self.key].setup_loader(instance)
+        
         def lazyload():
             params = {}
             allparams = True
+            # if the instance wasnt loaded from the database, then it cannot lazy load
+            # child items.  one reason for this is that a bi-directional relationship
+            # will not update properly, since bi-directional uses lazy loading functions
+            # in both directions, and this instance will not be present in the lazily-loaded
+            # results of the other objects since its not in the database
+            if not mapper.has_identity(instance):
+                return None
             #print "setting up loader, lazywhere", str(self.lazywhere), "binds", self.lazybinds
             for col, bind in self.lazybinds.iteritems():
                 params[bind.key] = self.parent._getattrbycolumn(instance, col)
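
The comment added to lazyload() above amounts to the following guard; lazyload_children and load_from_db are hypothetical simplifications of the real loader callable.

    def lazyload_children(instance, load_from_db):
        # a pending (never-flushed) instance has no identity in the database,
        # so firing its lazy loader would query for rows that cannot exist and
        # would break bi-directional relationship population
        if not hasattr(instance, '_instance_key'):   # i.e. not has_identity()
            return None
        return load_from_db(instance)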

test/base/attributes.py

         p1 = Post()
         Blog.posts.set_callable(b, lambda:[p1])
         Post.blog.set_callable(p1, lambda:b)
-
+        manager.commit(p1, b)
         # assert connections
         assert p1.blog is b
         assert p1 in b.posts
 
-        # no orphans (but we are using optimistic checks)
-        assert getattr(Blog, 'posts').hasparent(p1, optimistic=True)
-        assert getattr(Post, 'blog').hasparent(b, optimistic=True)
+        # no orphans
+        assert getattr(Blog, 'posts').hasparent(p1)
+        assert getattr(Post, 'blog').hasparent(b)
         
-        # lazy loads currently not processed for "hasparent" status, so without
-        # optimistic, it returns false
-        assert not getattr(Blog, 'posts').hasparent(p1, optimistic=False)
-        assert not getattr(Post, 'blog').hasparent(b, optimistic=False)
-        
-        # ok what about non-optimistic.  well, dont use lazy loaders,
-        # assign things manually, so the "hasparent" flags get set
+        # manual connections
         b2 = Blog()
         p2 = Post()
         b2.posts.append(p2)
-        assert getattr(Blog, 'posts').hasparent(p2, optimistic=False)
-        assert getattr(Post, 'blog').hasparent(b2, optimistic=False)
+        assert getattr(Blog, 'posts').hasparent(p2)
+        assert getattr(Post, 'blog').hasparent(b2)
         
     def testinheritance(self):
         """tests that attributes are polymorphic"""

test/orm/mapper.py

 
 class MapperSuperTest(AssertMixin):
     def setUpAll(self):
-        db.echo = False
         tables.create()
         tables.data()
-        db.echo = testbase.echo
     def tearDownAll(self):
-        db.echo = False
         tables.drop()
-        db.echo = testbase.echo
     def tearDown(self):
         clear_mappers()
     def setUp(self):
         self.assert_(u is not u2)
 
     def testunicodeget(self):
-        """tests that Query.get properly sets up the type for the bind parameter.  using unicode would normally fail 
+        """test that Query.get properly sets up the type for the bind parameter.  using unicode would normally fail 
         on postgres, mysql and oracle unless it is converted to an encoded string"""
         metadata = BoundMetaData(db)
         table = Table('foo', metadata, 
         self.assert_(a not in u.addresses)
 
     def testbadconstructor(self):
-        """tests that if the construction of a mapped class fails, the instnace does not get placed in the session"""
+        """test that if the construction of a mapped class fails, the instnace does not get placed in the session"""
         class Foo(object):
             def __init__(self, one, two):
                 pass
             pass
             
     def testrefresh_lazy(self):
-        """tests that when a lazy loader is set as a trigger on an object's attribute (at the attribute level, not the class level), a refresh() operation doesnt fire the lazy loader or create any problems"""
+        """test that when a lazy loader is set as a trigger on an object's attribute (at the attribute level, not the class level), a refresh() operation doesnt fire the lazy loader or create any problems"""
         s = create_session()
         mapper(User, users, properties={'addresses':relation(mapper(Address, addresses))})
         q2 = s.query(User).options(lazyload('addresses'))
         self.assert_sql_count(db, go, 1)
 
     def testexpire(self):
+        """test the expire function"""
         s = create_session()
         mapper(User, users, properties={'addresses':relation(mapper(Address, addresses), lazy=False)})
         u = s.get(User, 7)
         self.assert_(u.user_name =='jack')
         
     def testrefresh2(self):
+        """test a hang condition that was occuring on expire/refresh"""
         s = create_session()
         mapper(Address, addresses)
 
         s.refresh(u) #hangs
         
     def testmagic(self):
+        """not sure what this is really testing."""
         mapper(User, users, properties = {
             'addresses' : relation(mapper(Address, addresses))
         })
         })
         q = create_session().query(m)
         q.select_by(email_address='foo')
+
+    def testmappingtojoin(self):
+        """test mapping to a join"""
+        usersaddresses = sql.join(users, addresses, users.c.user_id == addresses.c.user_id)
+        m = mapper(User, usersaddresses, primary_key=[users.c.user_id])
+        q = create_session().query(m)
+        l = q.select()
+        self.assert_result(l, User, *user_result[0:2])
         
-    def testjoinbyfk(self):
-        class UserWithAddress(object):
-			pass
+    def testmappingtoouterjoin(self):
+        """test mapping to an outer join, with a composite primary key that allows nulls"""
+        result = [
+        {'user_id' : 7, 'address_id' : 1},
+        {'user_id' : 8, 'address_id' : 2},
+        {'user_id' : 8, 'address_id' : 3},
+        {'user_id' : 8, 'address_id' : 4},
+        {'user_id' : 9, 'address_id':None}
+        ]
+        
         j = join(users, addresses, isouter=True)
-        m = mapper(UserWithAddress, j, allow_null_pks=True)
+        m = mapper(User, j, allow_null_pks=True, primary_key=[users.c.user_id, addresses.c.address_id])
         q = create_session().query(m)
+        l = q.select()
+        self.assert_result(l, User, *result)
         
     def testjoinvia(self):
+        """test the join_via and join_to functions"""
         m = mapper(User, users, properties={
             'orders':relation(mapper(Order, orders, properties={
                 'items':relation(mapper(Item, orderitems))
         self.assert_result(l, User, user_result[0])
         
     def testorderby(self):
+        """test ordering at the mapper and query level"""
         # TODO: make a unit test out of these various combinations
 #        m = mapper(User, users, order_by=desc(users.c.user_name))
         mapper(User, users, order_by=None)
         
     @testbase.unsupported('firebird') 
     def testfunction(self):
-        """tests mapping to a SELECT statement that has functions in it."""
+        """test mapping to a SELECT statement that has functions in it."""
         s = select([users, (users.c.user_id * 2).label('concat'), func.count(addresses.c.address_id).label('count')],
         users.c.user_id==addresses.c.user_id, group_by=[c for c in users.c]).alias('myselect')
         mapper(User, s)
         
     @testbase.unsupported('firebird') 
     def testcount(self):
+        """test the count function on Query
+        
+        (why doesnt this work on firebird?)"""
         mapper(User, users)
         q = create_session().query(User)
         self.assert_(q.count()==3)
         self.assert_(q.count(users.c.user_id.in_(8,9))==2)
         self.assert_(q.count_by(user_name='fred')==1)
             
-    def testmultitable(self):
-        usersaddresses = sql.join(users, addresses, users.c.user_id == addresses.c.user_id)
-        m = mapper(User, usersaddresses, primary_key=[users.c.user_id])
-        q = create_session().query(m)
-        l = q.select()
-        self.assert_result(l, User, *user_result[0:2])
 
     def testoverride(self):
         # assert that overriding a column raises an error

test/orm/objectstore.py

 class HistoryTest(SessionTest):
     def setUpAll(self):
         SessionTest.setUpAll(self)
-        db.echo = False
         users.create()
         addresses.create()
-        db.echo = testbase.echo
     def tearDownAll(self):
-        db.echo = False
         addresses.drop()
         users.drop()
-        db.echo = testbase.echo
         SessionTest.tearDownAll(self)
         
     def testattr(self):
     @testbase.unsupported('mssql')
     def setUpAll(self):
         SessionTest.setUpAll(self)
-        #db.echo = False
         global table
         global table2
         global table3
         table.create()
         table2.create()
         table3.create()
-        db.echo = testbase.echo
     @testbase.unsupported('mssql')
     def tearDownAll(self):
-        db.echo = False
         table.drop()
         table2.drop()
         table3.drop()
-        db.echo = testbase.echo
         SessionTest.tearDownAll(self)
         
     # not support on sqlite since sqlite's auto-pk generation only works with
     defaults back from the engine."""
     def setUpAll(self):
         SessionTest.setUpAll(self)
-        #db.echo = 'debug'
         use_string_defaults = db.engine.__module__.endswith('postgres') or db.engine.__module__.endswith('oracle') or db.engine.__module__.endswith('sqlite')
 
         if use_string_defaults:
 
     def setUpAll(self):
         SessionTest.setUpAll(self)
-        db.echo = False
         tables.create()
-        db.echo = testbase.echo
     def tearDownAll(self):
-        db.echo = False
         tables.drop()
-        db.echo = testbase.echo
         SessionTest.tearDownAll(self)
         
     def setUp(self):
-        db.echo = False
         keywords.insert().execute(
             dict(name='blue'),
             dict(name='red'),
             dict(name='round'),
             dict(name='square')
         )
-        db.echo = testbase.echo
 
     def tearDown(self):
-        db.echo = False
         tables.delete()
-        db.echo = testbase.echo
 
         #self.assert_(len(ctx.current.new) == 0)
         #self.assert_(len(ctx.current.dirty) == 0)
 class SaveTest2(SessionTest):
 
     def setUp(self):
-        db.echo = False
         ctx.current.clear()
         clear_mappers()
         self.users = Table('users', db,
 #        raise repr(self.addresses) + repr(self.addresses.foreign_keys)
         self.users.create()
         self.addresses.create()
-        db.echo = testbase.echo
 
     def tearDown(self):
-        db.echo = False
         self.addresses.drop()
         self.users.drop()
-        db.echo = testbase.echo
         SessionTest.tearDown(self)
     
     def testbackwardsnonmatch(self):
         pass
 
     def testmanytomanyxtracolremove(self):
-        """tests that a many-to-many on a table that has an extra column can properly delete rows from the table
+        """test that a many-to-many on a table that has an extra column can properly delete rows from the table
         without referencing the extra column"""
         mapper(Keyword, t3)
 

test/orm/session.py

         ))
         s = create_session()
         a = Address()
+        s.save(a)
         try:
-            s.save(a)
-        except exceptions.InvalidRequestError, e:
+            s.flush()
+        except exceptions.FlushError, e:
             pass
-        s.flush()
         assert a.address_id is None, "Error: address should not be persistent"
         
     def test_delete_new_object(self):