Commits

Mike Bayer  committed 5bdf283

- flush() refactor merged from uow_nontree branch r3871-r3885
- topological.py cleaned up, presents three public facing functions which
return list/tuple based structures, without exposing any internals. only
the third function returns the "hierarchical" structure. when results
include "cycles" or "child" items, 2- or 3- tuples are used to represent
results.
- unitofwork uses InstanceState almost exclusively now. new and deleted lists
are now dicts which ref the actual object to provide a strong ref for the
duration that they're in those lists. IdentitySet is only used for the public
facing versions of "new" and "deleted".
- unitofwork topological sort no longer uses the "hierarchical" version of the sort
for the base sort, only for the "per-object" secondary sort where it still
helps to group non-dependent operations together and provides expected insert
order. the default sort deals with UOWTasks in a straight list and is greatly
simplified. Tests all pass but need to see if svilen's stuff still works,
one block of code in _sort_cyclical_dependencies() seems to not be needed anywhere
but I definitely put it there for a reason at some point; if not hopefully we
can derive more test coverage from that.
- the UOWEventHandler is only applied to object-storing attributes, not
scalar (i.e. column-based) ones. cuts out a ton of overhead when setting
non-object based attributes.
- InstanceState also used throughout the flush process, i.e. dependency.py,
mapper.save_obj()/delete_obj(), sync.execute() all expect InstanceState objects
in most cases now.
- mapper/property cascade_iterator() takes InstanceState as its argument,
but still returns lists of object instances so that they are not dereferenced.
- a few tricks needed when dealing with InstanceState, i.e. when loading a list
of items that are possibly fresh from the DB, you *have* to get the actual objects
into a strong-referencing datastructure else they fall out of scope immediately.
dependency.py caches lists of dependent objects which it loads now (i.e. history
collections).
- AttributeHistory is gone, replaced by a function that returns a 3-tuple of
added, unchanged, deleted. these collections still reference the object
instances directly for the strong-referencing reasons mentioned, but
it uses less IdentitySet logic to generate.

  • Participants
  • Parent commits e12ca9a

Comments (0)

Files changed (20)

File lib/sqlalchemy/engine/base.py

       
     returns_rows
       True if the statement should return result rows
+
+    postfetch_cols
+     a list of Column objects for which a server-side default
+     or inline SQL expression value was fired off.  applies to inserts and updates.
       
     The Dialect should provide an ExecutionContext via the
     create_execution_context() method.  The `pre_exec` and `post_exec`
 
         raise NotImplementedError()
 
-    def postfetch_cols(self):
-        """return a list of Column objects for which a 'passive' server-side default
-        value was fired off.  applies to inserts and updates."""
-
-        raise NotImplementedError()
 
 class Compiled(object):
     """Represent a compiled SQL expression.
 
         See ExecutionContext for details.
         """
-        return self.context.postfetch_cols()
+        return self.context.postfetch_cols
         
     def supports_sane_rowcount(self):
         """Return ``supports_sane_rowcount`` from the dialect.

File lib/sqlalchemy/engine/default.py

         return self._last_updated_params
 
     def lastrow_has_defaults(self):
-        return hasattr(self, '_postfetch_cols') and len(self._postfetch_cols)
+        return hasattr(self, 'postfetch_cols') and len(self.postfetch_cols)
 
-    def postfetch_cols(self):
-        return self._postfetch_cols
-        
     def set_input_sizes(self):
         """Given a cursor and ClauseParameters, call the appropriate
         style of ``setinputsizes()`` on the cursor, using DB-API types
                 else:
                     self._last_updated_params = compiled_parameters
 
-                self._postfetch_cols = self.compiled.postfetch
+                self.postfetch_cols = self.compiled.postfetch

File lib/sqlalchemy/orm/attributes.py

     def get_history(self, state, passive=False):
         current = self.get(state, passive=passive)
         if current is PASSIVE_NORESULT:
-            return None
-        return AttributeHistory(self, state, current)
+            return (None, None, None)
+        else:
+            return _create_history(self, state, current)
         
     def set_callable(self, state, callable_, clear=False):
         """Set a callable function for this attribute on the given object.
 
     def check_mutable_modified(self, state):
         if self.mutable_scalars:
-            h = self.get_history(state, passive=True)
-            if h is not None and h.is_modified():
+            (added, unchanged, deleted) = self.get_history(state, passive=True)
+            if added or deleted:
                 state.modified = True
                 return True
             else:
             collections.CollectionAdapter(self, state, user_data)
             return getattr(user_data, '_sa_adapter')
 
-
 class GenericBackrefExtension(interfaces.AttributeExtension):
     """An extension which synchronizes a two-way relationship.
 
 class InstanceState(object):
     """tracks state information at the instance level."""
 
-    __slots__ = 'class_', 'obj', 'dict', 'pending', 'committed_state', 'modified', 'trigger', 'callables', 'parents', 'instance_dict', '_strong_obj', 'expired_attributes'
-    
     def __init__(self, obj):
         self.class_ = obj.__class__
         self.obj = weakref.ref(obj, self.__cleanup)
         self.dict = obj.__dict__
         self.committed_state = {}
-        self.modified = False
+        self.modified = self.strong = False
         self.trigger = None
         self.callables = {}
         self.parents = {}
             return False
         
     def __resurrect(self, instance_dict):
-        if self.is_modified():
+        if self.strong or self.is_modified():
             # store strong ref'ed version of the object; will revert
             # to weakref when changes are persisted
             obj = new_instance(self.class_, state=self)
 class StrongInstanceDict(dict):
     def all_states(self):
         return [o._state for o in self.values()]
+
+def _create_history(attr, state, current):
+    if state.committed_state:
+        original = state.committed_state.get(attr.key, NO_VALUE)
+    else:
+        original = NO_VALUE
+
+    if hasattr(attr, 'get_collection'):
+        if original is NO_VALUE:
+            s = util.IdentitySet([])
+        else:
+            s = util.IdentitySet(original)
+
+        _added_items = []
+        _unchanged_items = []
+        _deleted_items = []
+        if current:
+            collection = attr.get_collection(state, current)
+            for a in collection:
+                if a in s:
+                    _unchanged_items.append(a)
+                else:
+                    _added_items.append(a)
+        _deleted_items = list(s.difference(_unchanged_items))
+
+        return (_added_items, _unchanged_items, _deleted_items)
+    else:
+        if attr.is_equal(current, original) is True:
+            _unchanged_items = [current]
+            _added_items = []
+            _deleted_items = []
+        else:
+            _added_items = [current]
+            if original is not NO_VALUE and original is not None:
+                _deleted_items = [original]
+            else:
+                _deleted_items = []
+            _unchanged_items = []
+        return (_added_items, _unchanged_items, _deleted_items)
     
-class AttributeHistory(object):
-    """Calculate the *history* of a particular attribute on a
-    particular instance.
-    """
-
-    def __init__(self, attr, state, current):
-        self.attr = attr
-
-        # get the "original" value.  if a lazy load was fired when we got
-        # the 'current' value, this "original" was also populated just
-        # now as well (therefore we have to get it second)
-        if state.committed_state:
-            original = state.committed_state.get(attr.key, NO_VALUE)
-        else:
-            original = NO_VALUE
-
-        if hasattr(attr, 'get_collection'):
-            self._current = current
-
-            if original is NO_VALUE:
-                s = util.IdentitySet([])
-            else:
-                s = util.IdentitySet(original)
-
-            # FIXME: the tests have an assumption on the collection's ordering
-            self._added_items = util.OrderedIdentitySet()
-            self._unchanged_items = util.OrderedIdentitySet()
-            self._deleted_items = util.OrderedIdentitySet()
-            if current:
-                collection = attr.get_collection(state, current)
-                for a in collection:
-                    if a in s:
-                        self._unchanged_items.add(a)
-                    else:
-                        self._added_items.add(a)
-            for a in s:
-                if a not in self._unchanged_items:
-                    self._deleted_items.add(a)
-        else:
-            self._current = [current]
-            if attr.is_equal(current, original) is True:
-                self._unchanged_items = [current]
-                self._added_items = []
-                self._deleted_items = []
-            else:
-                self._added_items = [current]
-                if original is not NO_VALUE and original is not None:
-                    self._deleted_items = [original]
-                else:
-                    self._deleted_items = []
-                self._unchanged_items = []
-
-    def __iter__(self):
-        return iter(self._current)
-
-    def is_modified(self):
-        return len(self._deleted_items) > 0 or len(self._added_items) > 0
-
-    def added_items(self):
-        return list(self._added_items)
-
-    def unchanged_items(self):
-        return list(self._unchanged_items)
-
-    def deleted_items(self):
-        return list(self._deleted_items)
-
 class PendingCollection(object):
     """stores items appended and removed from a collection that has not been loaded yet.
     
     
     return chain(*[cl._class_state.attrs.values() for cl in class_.__mro__[:-1] if hasattr(cl, '_class_state')])
 
-def is_modified(instance):
-    return instance._state.is_modified()
+def get_history(state, key, **kwargs):
+    return getattr(state.class_, key).impl.get_history(state, **kwargs)
+get_state_history = get_history
 
-def get_history(instance, key, **kwargs):
-    return getattr(instance.__class__, key).impl.get_history(instance._state, **kwargs)
-
-def get_as_list(instance, key, passive=False):
-    """Return an attribute of the given name from the given instance.
-
-    If the attribute is a scalar, return it as a single-item list,
-    otherwise return a collection based attribute.
-
-    If the attribute's value is to be produced by an unexecuted
-    callable, the callable will only be executed if the given
-    `passive` flag is False.
+def get_as_list(state, key, passive=False):
+    """return an InstanceState attribute as a list, 
+    regardless of it being a scalar or collection-based
+    attribute.
+    
+    returns None if passive=True and the getter returns
+    PASSIVE_NORESULT.
     """
-
-    attr = getattr(instance.__class__, key).impl
-    state = instance._state
+    
+    attr = getattr(state.class_, key).impl
     x = attr.get(state, passive=passive)
     if x is PASSIVE_NORESULT:
-        return []
+        return None
     elif hasattr(attr, 'get_collection'):
-        return list(attr.get_collection(state, x))
+        return attr.get_collection(state, x)
     elif isinstance(x, list):
         return x
     else:

File lib/sqlalchemy/orm/dependency.py

 
         return getattr(self.parent.class_, self.key)
 
-    def hasparent(self, obj):
+    def hasparent(self, state):
         """return True if the given object instance has a parent, 
         according to the ``InstrumentedAttribute`` handled by this ``DependencyProcessor``."""
         
         # TODO: use correct API for this
-        return self._get_instrumented_attribute().impl.hasparent(obj._state)
+        return self._get_instrumented_attribute().impl.hasparent(state)
         
     def register_dependencies(self, uowcommit):
         """Tell a ``UOWTransaction`` what mappers are dependent on
 
         raise NotImplementedError()
 
-    def whose_dependent_on_who(self, obj1, obj2):
+    def whose_dependent_on_who(self, state1, state2):
         """Given an object pair assuming `obj2` is a child of `obj1`,
         return a tuple with the dependent object second, or None if
-        they are equal.
-
-        Used by objectstore's object-level topological sort (i.e. cyclical
-        table dependency).
+        there is no dependency.
         """
 
-        if obj1 is obj2:
+        if state1 is state2:
             return None
         elif self.direction == ONETOMANY:
-            return (obj1, obj2)
+            return (state1, state2)
         else:
-            return (obj2, obj1)
+            return (state2, state1)
 
     def process_dependencies(self, task, deplist, uowcommit, delete = False):
         """This method is called during a flush operation to
 
         raise NotImplementedError()
 
-    def _verify_canload(self, child):
+    def _verify_canload(self, state):
         if not self.enable_typechecks:
             return
-        if child is not None and not self.mapper._canload(child):
-            raise exceptions.FlushError("Attempting to flush an item of type %s on collection '%s', which is handled by mapper '%s' and does not load items of that type.  Did you mean to use a polymorphic mapper for this relationship ?  Set 'enable_typechecks=False' on the relation() to disable this exception.  Mismatched typeloading may cause bi-directional relationships (backrefs) to not function properly." % (child.__class__, self.prop, self.mapper))
+        if state is not None and not self.mapper._canload(state):
+            raise exceptions.FlushError("Attempting to flush an item of type %s on collection '%s', which is handled by mapper '%s' and does not load items of that type.  Did you mean to use a polymorphic mapper for this relationship ?  Set 'enable_typechecks=False' on the relation() to disable this exception.  Mismatched typeloading may cause bi-directional relationships (backrefs) to not function properly." % (state.class_, self.prop, self.mapper))
         
-    def _synchronize(self, obj, child, associationrow, clearkeys, uowcommit):
+    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
         """Called during a flush to synchronize primary key identifier
         values between a parent/child object, as well as to an
         associationrow in the case of many-to-many.
         raise NotImplementedError()
 
     def _compile_synchronizers(self):
-        """Assemble a list of *synchronization rules*, which are
-        instructions on how to populate the objects on each side of a
-        relationship.  This is done when a ``DependencyProcessor`` is
-        first initialized.
-
-        The list of rules is used within commits by the ``_synchronize()``
-        method when dependent objects are processed.
+        """Assemble a list of *synchronization rules*.
+                
+        These are fired to populate attributes from one side
+        of a relation to another.
         """
 
         self.syncrules = sync.ClauseSynchronizer(self.parent, self.mapper, self.direction)
         else:
             self.syncrules.compile(self.prop.primaryjoin, foreign_keys=self.foreign_keys)
 
-    def get_object_dependencies(self, obj, uowcommit, passive = True):
-        """Return the list of objects that are dependent on the given
-        object, as according to the relationship this dependency
-        processor represents.
-        """
+    def get_object_dependencies(self, state, uowcommit, passive = True):
+        key = ("dependencies", state, self.key, passive)
+        
+        # cache the objects, not the states; the strong reference here
+        # prevents newly loaded objects from being dereferenced during the 
+        # flush process
+        if key in uowcommit.attributes:
+            (added, unchanged, deleted) = uowcommit.attributes[key]
+        else:
+            (added, unchanged, deleted) = attributes.get_history(state, self.key, passive = passive)
+            uowcommit.attributes[key] = (added, unchanged, deleted)
+            
+        if added is None:
+            return (added, unchanged, deleted)
+        else:
+            return (
+                [getattr(c, '_state', None) for c in added],
+                [getattr(c, '_state', None) for c in unchanged],
+                [getattr(c, '_state', None) for c in deleted],
+                )
 
-        return attributes.get_history(obj, self.key, passive = passive)
-
-    def _conditional_post_update(self, obj, uowcommit, related):
+    def _conditional_post_update(self, state, uowcommit, related):
         """Execute a post_update call.
 
         For relations that contain the post_update flag, an additional
         given related object list contains ``INSERT``s or ``DELETE``s.
         """
 
-        if obj is not None and self.post_update:
+        if state is not None and self.post_update:
             for x in related:
                 if x is not None:
-                    uowcommit.register_object(obj, postupdate=True, post_update_cols=self.syncrules.dest_columns())
+                    uowcommit.register_object(state, postupdate=True, post_update_cols=self.syncrules.dest_columns())
                     break
 
     def __str__(self):
             # this phase can be called safely for any cascade but is unnecessary if delete cascade
             # is on.
             if (not self.cascade.delete or self.post_update) and not self.passive_deletes=='all':
-                for obj in deplist:
-                    childlist = self.get_object_dependencies(obj, uowcommit, passive=self.passive_deletes)
-                    if childlist is not None:
-                        for child in childlist.deleted_items():
+                for state in deplist:
+                    (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=self.passive_deletes)
+                    if unchanged or deleted:
+                        for child in deleted:
                             if child is not None and self.hasparent(child) is False:
-                                self._synchronize(obj, child, None, True, uowcommit)
-                                self._conditional_post_update(child, uowcommit, [obj])
-                        for child in childlist.unchanged_items():
+                                self._synchronize(state, child, None, True, uowcommit)
+                                self._conditional_post_update(child, uowcommit, [state])
+                        for child in unchanged:
                             if child is not None:
-                                self._synchronize(obj, child, None, True, uowcommit)
-                                self._conditional_post_update(child, uowcommit, [obj])
+                                self._synchronize(state, child, None, True, uowcommit)
+                                self._conditional_post_update(child, uowcommit, [state])
         else:
-            for obj in deplist:
-                childlist = self.get_object_dependencies(obj, uowcommit, passive=True)
-                if childlist is not None:
-                    for child in childlist.added_items():
-                        self._synchronize(obj, child, None, False, uowcommit)
-                        self._conditional_post_update(child, uowcommit, [obj])
-                    for child in childlist.deleted_items():
+            for state in deplist:
+                (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=True)
+                if added or deleted:
+                    for child in added:
+                        self._synchronize(state, child, None, False, uowcommit)
+                        if child is not None:
+                            self._conditional_post_update(child, uowcommit, [state])
+                    for child in deleted:
                         if not self.cascade.delete_orphan and not self.hasparent(child):
-                            self._synchronize(obj, child, None, True, uowcommit)
+                            self._synchronize(state, child, None, True, uowcommit)
 
     def preprocess_dependencies(self, task, deplist, uowcommit, delete = False):
         #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " preprocess_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
             # head object is being deleted, and we manage its list of child objects
             # the child objects have to have their foreign key to the parent set to NULL
             if not self.post_update and not self.cascade.delete and not self.passive_deletes=='all':
-                for obj in deplist:
-                    childlist = self.get_object_dependencies(obj, uowcommit, passive=self.passive_deletes)
-                    if childlist is not None:
-                        for child in childlist.deleted_items():
+                for state in deplist:
+                    (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=self.passive_deletes)
+                    if unchanged or deleted:
+                        for child in deleted:
                             if child is not None and self.hasparent(child) is False:
                                 uowcommit.register_object(child)
-                        for child in childlist.unchanged_items():
+                        for child in unchanged:
                             if child is not None:
                                 uowcommit.register_object(child)
         else:
-            for obj in deplist:
-                childlist = self.get_object_dependencies(obj, uowcommit, passive=True)
-                if childlist is not None:
-                    for child in childlist.added_items():
+            for state in deplist:
+                (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=True)
+                if added or deleted:
+                    for child in added:
                         if child is not None:
                             uowcommit.register_object(child)
-                    for child in childlist.deleted_items():
+                    for child in deleted:
                         if not self.cascade.delete_orphan:
                             uowcommit.register_object(child, isdelete=False)
                         elif self.hasparent(child) is False:
                             uowcommit.register_object(child, isdelete=True)
                             for c, m in self.mapper.cascade_iterator('delete', child):
-                                uowcommit.register_object(c, isdelete=True)
+                                uowcommit.register_object(c._state, isdelete=True)
 
-    def _synchronize(self, obj, child, associationrow, clearkeys, uowcommit):
-        source = obj
+    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
+        if child is not None:
+            child = getattr(child, '_state', child)
+        source = state
         dest = child
-        if dest is None or (not self.post_update and uowcommit.is_deleted(dest)):
+        if dest is None or (not self.post_update and uowcommit.state_is_deleted(dest)):
             return
         self._verify_canload(child)
-        self.syncrules.execute(source, dest, obj, child, clearkeys)
+        self.syncrules.execute(source, dest, source, child, clearkeys)
 
 class ManyToOneDP(DependencyProcessor):
     def register_dependencies(self, uowcommit):
             if self.post_update and not self.cascade.delete_orphan and not self.passive_deletes=='all':
                 # post_update means we have to update our row to not reference the child object
                 # before we can DELETE the row
-                for obj in deplist:
-                    self._synchronize(obj, None, None, True, uowcommit)
-                    childlist = self.get_object_dependencies(obj, uowcommit, passive=self.passive_deletes)
-                    if childlist is not None:
-                        self._conditional_post_update(obj, uowcommit, childlist.deleted_items() + childlist.unchanged_items() + childlist.added_items())
+                for state in deplist:
+                    self._synchronize(state, None, None, True, uowcommit)
+                    (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=self.passive_deletes)
+                    if added or unchanged or deleted:
+                        self._conditional_post_update(state, uowcommit, deleted + unchanged + added)
         else:
-            for obj in deplist:
-                childlist = self.get_object_dependencies(obj, uowcommit, passive=True)
-                if childlist is not None:
-                    for child in childlist.added_items():
-                        self._synchronize(obj, child, None, False, uowcommit)
-                    self._conditional_post_update(obj, uowcommit, childlist.deleted_items() + childlist.unchanged_items() + childlist.added_items())
+            for state in deplist:
+                (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=True)
+                if added or deleted or unchanged:
+                    for child in added:
+                        self._synchronize(state, child, None, False, uowcommit)
+                    self._conditional_post_update(state, uowcommit, deleted + unchanged + added)
 
     def preprocess_dependencies(self, task, deplist, uowcommit, delete = False):
         #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " PRE process_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
             return
         if delete:
             if self.cascade.delete:
-                for obj in deplist:
-                    childlist = self.get_object_dependencies(obj, uowcommit, passive=self.passive_deletes)
-                    if childlist is not None:
-                        for child in childlist.deleted_items() + childlist.unchanged_items():
+                for state in deplist:
+                    (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=self.passive_deletes)
+                    if deleted or unchanged:
+                        for child in deleted + unchanged:
                             if child is not None and self.hasparent(child) is False:
                                 uowcommit.register_object(child, isdelete=True)
                                 for c, m in self.mapper.cascade_iterator('delete', child):
-                                    uowcommit.register_object(c, isdelete=True)
+                                    uowcommit.register_object(c._state, isdelete=True)
         else:
-            for obj in deplist:
-                uowcommit.register_object(obj)
+            for state in deplist:
+                uowcommit.register_object(state)
                 if self.cascade.delete_orphan:
-                    childlist = self.get_object_dependencies(obj, uowcommit, passive=self.passive_deletes)
-                    if childlist is not None:
-                        for child in childlist.deleted_items():
+                    (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=self.passive_deletes)
+                    if deleted:
+                        for child in deleted:
                             if self.hasparent(child) is False:
                                 uowcommit.register_object(child, isdelete=True)
                                 for c, m in self.mapper.cascade_iterator('delete', child):
-                                    uowcommit.register_object(c, isdelete=True)
+                                    uowcommit.register_object(c._state, isdelete=True)
 
-    def _synchronize(self, obj, child, associationrow, clearkeys, uowcommit):
+    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
         source = child
-        dest = obj
-        if dest is None or (not self.post_update and uowcommit.is_deleted(dest)):
+        dest = state
+        if dest is None or (not self.post_update and uowcommit.state_is_deleted(dest)):
             return
         self._verify_canload(child)
-        self.syncrules.execute(source, dest, obj, child, clearkeys)
+        self.syncrules.execute(source, dest, dest, child, clearkeys)
 
 class ManyToManyDP(DependencyProcessor):
     def register_dependencies(self, uowcommit):
             reverse_dep = None
             
         if delete:
-            for obj in deplist:
-                childlist = self.get_object_dependencies(obj, uowcommit, passive=self.passive_deletes)
-                if childlist is not None:
-                    for child in childlist.deleted_items() + childlist.unchanged_items():
-                        if child is None or (reverse_dep and (reverse_dep, "manytomany", id(child), id(obj)) in uowcommit.attributes):
+            for state in deplist:
+                (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=self.passive_deletes)
+                if deleted or unchanged:
+                    for child in deleted + unchanged:
+                        if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes):
                             continue
                         associationrow = {}
-                        self._synchronize(obj, child, associationrow, False, uowcommit)
+                        self._synchronize(state, child, associationrow, False, uowcommit)
                         secondary_delete.append(associationrow)
-                        uowcommit.attributes[(self, "manytomany", id(obj), id(child))] = True
+                        uowcommit.attributes[(self, "manytomany", state, child)] = True
         else:
-            for obj in deplist:
-                childlist = self.get_object_dependencies(obj, uowcommit)
-                if childlist is None: continue
-                for child in childlist.added_items():
-                    if child is None or (reverse_dep and (reverse_dep, "manytomany", id(child), id(obj)) in uowcommit.attributes):
-                        continue
-                    associationrow = {}
-                    self._synchronize(obj, child, associationrow, False, uowcommit)
-                    uowcommit.attributes[(self, "manytomany", id(obj), id(child))] = True
-                    secondary_insert.append(associationrow)
-                for child in childlist.deleted_items():
-                    if child is None or (reverse_dep and (reverse_dep, "manytomany", id(child), id(obj)) in uowcommit.attributes):
-                        continue
-                    associationrow = {}
-                    self._synchronize(obj, child, associationrow, False, uowcommit)
-                    uowcommit.attributes[(self, "manytomany", id(obj), id(child))] = True
-                    secondary_delete.append(associationrow)
+            for state in deplist:
+                (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit)
+                if added or deleted:
+                    for child in added:
+                        if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes):
+                            continue
+                        associationrow = {}
+                        self._synchronize(state, child, associationrow, False, uowcommit)
+                        uowcommit.attributes[(self, "manytomany", state, child)] = True
+                        secondary_insert.append(associationrow)
+                    for child in deleted:
+                        if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes):
+                            continue
+                        associationrow = {}
+                        self._synchronize(state, child, associationrow, False, uowcommit)
+                        uowcommit.attributes[(self, "manytomany", state, child)] = True
+                        secondary_delete.append(associationrow)
 
         if secondary_delete:
             secondary_delete.sort()
     def preprocess_dependencies(self, task, deplist, uowcommit, delete = False):
         #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " preprocess_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
         if not delete:
-            for obj in deplist:
-                childlist = self.get_object_dependencies(obj, uowcommit, passive=True)
-                if childlist is not None:
-                    for child in childlist.deleted_items():
+            for state in deplist:
+                (added, unchanged, deleted) = self.get_object_dependencies(state, uowcommit, passive=True)
+                if deleted:
+                    for child in deleted:
                         if self.cascade.delete_orphan and self.hasparent(child) is False:
                             uowcommit.register_object(child, isdelete=True)
                             for c, m in self.mapper.cascade_iterator('delete', child):
-                                uowcommit.register_object(c, isdelete=True)
+                                uowcommit.register_object(c._state, isdelete=True)
 
-    def _synchronize(self, obj, child, associationrow, clearkeys, uowcommit):
+    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
         dest = associationrow
         source = None
         if dest is None:
             return
         self._verify_canload(child)
-        self.syncrules.execute(source, dest, obj, child, clearkeys)
+        self.syncrules.execute(source, dest, state, child, clearkeys)
 
 class AssociationDP(OneToManyDP):
     def __init__(self, *args, **kwargs):
     many-to-many join, when performing a ``flush()``.
 
     The ``Task`` objects in the objectstore module treat it just like
-    any other ``Mapper``, but in fact it only serves as a *dependency*
+    any other ``Mapper``, but in fact it only serves as a dependency
     placeholder for the many-to-many update task.
     """
 

File lib/sqlalchemy/orm/dynamic.py

 
     def get(self, state, passive=False):
         if passive:
-            return self.get_history(state, passive=True).added_items()
+            return self._get_collection(state, passive=True).added_items
         else:
             return AppenderQuery(self, state)
 
         state.dict[self.key] = CollectionHistory(self, state)
 
     def get_collection(self, state, user_data=None):
-        return self.get_history(state, passive=True)._added_items
+        return self._get_collection(state, passive=True).added_items
         
     def set(self, state, value, initiator):
         if initiator is self:
         raise NotImplementedError()
         
     def get_history(self, state, passive=False):
+        c = self._get_collection(state, passive)
+        return (c.added_items, c.unchanged_items, c.deleted_items)
+        
+    def _get_collection(self, state, passive=False):
         try:
             c = state.dict[self.key]
         except KeyError:
             return CollectionHistory(self, state, apply_to=c)
         else:
             return c
-
+        
     def append(self, state, value, initiator, passive=False):
         if initiator is not self:
-            self.get_history(state, passive=True)._added_items.append(value)
+            self._get_collection(state, passive=True).added_items.append(value)
             self.fire_append_event(state, value, initiator)
     
     def remove(self, state, value, initiator, passive=False):
         if initiator is not self:
-            self.get_history(state, passive=True)._deleted_items.append(value)
+            self._get_collection(state, passive=True).deleted_items.append(value)
             self.fire_remove_event(state, value, initiator)
 
             
     def __iter__(self):
         sess = self.__session()
         if sess is None:
-            return iter(self.attr.get_history(self.state, passive=True)._added_items)
+            return iter(self.attr._get_collection(self.state, passive=True).added_items)
         else:
             return iter(self._clone(sess))
 
     def __getitem__(self, index):
         sess = self.__session()
         if sess is None:
-            return self.attr.get_history(self.state, passive=True)._added_items.__getitem__(index)
+            return self.attr._get_collection(self.state, passive=True).added_items.__getitem__(index)
         else:
             return self._clone(sess).__getitem__(index)
     
     def count(self):
         sess = self.__session()
         if sess is None:
-            return len(self.attr.get_history(self.state, passive=True)._added_items)
+            return len(self.attr._get_collection(self.state, passive=True).added_items)
         else:
             return self._clone(sess).count()
     
             oldlist = list(self)
         else:
             oldlist = []
-        self.attr.get_history(self.state, passive=True).replace(oldlist, collection)
+        self.attr._get_collection(self.state, passive=True).replace(oldlist, collection)
         return oldlist
         
     def append(self, item):
         self.attr.remove(self.state, item, None)
 
             
-class CollectionHistory(attributes.AttributeHistory): 
+class CollectionHistory(object): 
     """Overrides AttributeHistory to receive append/remove events directly."""
 
     def __init__(self, attr, state, apply_to=None):
         if apply_to:
-            deleted = util.IdentitySet(apply_to._deleted_items)
-            added = apply_to._added_items
+            deleted = util.IdentitySet(apply_to.deleted_items)
+            added = apply_to.added_items
             coll = AppenderQuery(attr, state).autoflush(False)
-            self._unchanged_items = [o for o in util.IdentitySet(coll) if o not in deleted]
-            self._added_items = apply_to._added_items
-            self._deleted_items = apply_to._deleted_items
+            self.unchanged_items = [o for o in util.IdentitySet(coll) if o not in deleted]
+            self.added_items = apply_to.added_items
+            self.deleted_items = apply_to.deleted_items
         else:
-            self._deleted_items = []
-            self._added_items = []
-            self._unchanged_items = []
+            self.deleted_items = []
+            self.added_items = []
+            self.unchanged_items = []
             
     def replace(self, olditems, newitems):
-        self._added_items = newitems
-        self._deleted_items = olditems
+        self.added_items = newitems
+        self.deleted_items = olditems
         
-    def is_modified(self):
-        return len(self._deleted_items) > 0 or len(self._added_items) > 0
-
-    def added_items(self):
-        return self._added_items
-
-    def unchanged_items(self):
-        return self._unchanged_items
-
-    def deleted_items(self):
-        return self._deleted_items
-    

File lib/sqlalchemy/orm/mapper.py

         """
         return self.identity_key_from_primary_key(self.primary_key_from_instance(instance))
 
+    def _identity_key_from_state(self, state):
+        return self.identity_key_from_primary_key(self._primary_key_from_state(state))
+
     def primary_key_from_instance(self, instance):
         """Return the list of primary key values for the given
         instance.
         """
 
-        return [self._get_attr_by_column(instance, column) for column in self.primary_key]
+        return [self._get_state_attr_by_column(instance._state, column) for column in self.primary_key]
 
-    def _canload(self, instance):
-        """return true if this mapper is capable of loading the given instance"""
+    def _primary_key_from_state(self, state):
+        return [self._get_state_attr_by_column(state, column) for column in self.primary_key]
+
+    def _canload(self, state):
         if self.polymorphic_on is not None:
-            return isinstance(instance, self.class_)
+            return issubclass(state.class_, self.class_)
         else:
-            return instance.__class__ is self.class_
-        
-    def _get_attr_by_column(self, obj, column):
-        """Return an instance attribute using a Column as the key."""
+            return state.class_ is self.class_
+    
+    def _get_state_attr_by_column(self, state, column):
         try:
-            return self._columntoproperty[column].getattr(obj, column)
+            return self._columntoproperty[column].getattr(state, column)
         except KeyError:
             prop = self.__props.get(column.key, None)
             if prop:
                 raise exceptions.InvalidRequestError("Column '%s.%s' is not available, due to conflicting property '%s':%s" % (column.table.name, column.name, column.key, repr(prop)))
             else:
                 raise exceptions.InvalidRequestError("No column %s.%s is configured on mapper %s..." % (column.table.name, column.name, str(self)))
+    
+    def _set_state_attr_by_column(self, state, column, value):
+        return self._columntoproperty[column].setattr(state, value, column)
+        
+    def _get_attr_by_column(self, obj, column):
+        return self._get_state_attr_by_column(obj._state, column)
         
     def _set_attr_by_column(self, obj, column, value):
-        """Set the value of an instance attribute using a Column as the key."""
+        self._set_state_attr_by_column(obj._state, column, value)
 
-        self._columntoproperty[column].setattr(obj, value, column)
-
-    def save_obj(self, objects, uowtransaction, postupdate=False, post_update_cols=None, single=False):
+    def save_obj(self, states, uowtransaction, postupdate=False, post_update_cols=None, single=False):
         """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects.
 
         This is called within the context of a UOWTransaction during a
 
         # if batch=false, call save_obj separately for each object
         if not single and not self.batch:
-            for obj in objects:
-                self.save_obj([obj], uowtransaction, postupdate=postupdate, post_update_cols=post_update_cols, single=True)
+            for state in states:
+                self.save_obj([state], uowtransaction, postupdate=postupdate, post_update_cols=post_update_cols, single=True)
             return
 
         if 'connection_callable' in uowtransaction.mapper_flush_opts:
             connection_callable = uowtransaction.mapper_flush_opts['connection_callable']
-            tups = [(obj, connection_callable(self, obj)) for obj in objects]
+            tups = [(state, connection_callable(self, state.obj())) for state in states]
         else:
             connection = uowtransaction.transaction.connection(self)
-            tups = [(obj, connection) for obj in objects]
+            tups = [(state, connection) for state in states]
             
         if not postupdate:
-            for obj, connection in tups:
-                if not has_identity(obj):
-                    for mapper in object_mapper(obj).iterate_to_root():
+            for state, connection in tups:
+                if not _state_has_identity(state):
+                    for mapper in _state_mapper(state).iterate_to_root():
                         if 'before_insert' in mapper.extension.methods:
-                            mapper.extension.before_insert(mapper, connection, obj)
+                            mapper.extension.before_insert(mapper, connection, state.obj())
                 else:
-                    for mapper in object_mapper(obj).iterate_to_root():
+                    for mapper in _state_mapper(state).iterate_to_root():
                         if 'before_update' in mapper.extension.methods:
-                            mapper.extension.before_update(mapper, connection, obj)
+                            mapper.extension.before_update(mapper, connection, state.obj())
 
-        for obj, connection in tups:
+        for state, connection in tups:
             # detect if we have a "pending" instance (i.e. has no instance_key attached to it),
             # and another instance with the same identity key already exists as persistent.  convert to an
             # UPDATE if so.
-            mapper = object_mapper(obj)
-            instance_key = mapper.identity_key_from_instance(obj)
-            if not postupdate and not has_identity(obj) and instance_key in uowtransaction.uow.identity_map:
+            mapper = _state_mapper(state)
+            instance_key = mapper._identity_key_from_state(state)
+            if not postupdate and not _state_has_identity(state) and instance_key in uowtransaction.uow.identity_map:
                 existing = uowtransaction.uow.identity_map[instance_key]
                 if not uowtransaction.is_deleted(existing):
-                    raise exceptions.FlushError("New instance %s with identity key %s conflicts with persistent instance %s" % (mapperutil.instance_str(obj), str(instance_key), mapperutil.instance_str(existing)))
+                    raise exceptions.FlushError("New instance %s with identity key %s conflicts with persistent instance %s" % (mapperutil.state_str(state), str(instance_key), mapperutil.instance_str(existing)))
                 if self.__should_log_debug:
-                    self.__log_debug("detected row switch for identity %s.  will update %s, remove %s from transaction" % (instance_key, mapperutil.instance_str(obj), mapperutil.instance_str(existing)))
+                    self.__log_debug("detected row switch for identity %s.  will update %s, remove %s from transaction" % (instance_key, mapperutil.state_str(state), mapperutil.instance_str(existing)))
                 uowtransaction.set_row_switch(existing)
-            if has_identity(obj):
-                if obj._instance_key != instance_key:
-                    raise exceptions.FlushError("Can't change the identity of instance %s in session (existing identity: %s; new identity: %s)" % (mapperutil.instance_str(obj), obj._instance_key, instance_key))
+            if _state_has_identity(state):
+                if state.dict['_instance_key'] != instance_key:
+                    raise exceptions.FlushError("Can't change the identity of instance %s in session (existing identity: %s; new identity: %s)" % (mapperutil.state_str(state), state.dict['_instance_key'], instance_key))
 
         inserted_objects = util.Set()
         updated_objects = util.Set()
             insert = []
             update = []
 
-            for obj, connection in tups:
-                mapper = object_mapper(obj)
+            for state, connection in tups:
+                mapper = _state_mapper(state)
                 if table not in mapper._pks_by_table:
                     continue
                 pks = mapper._pks_by_table[table]
-                instance_key = mapper.identity_key_from_instance(obj)
+                instance_key = mapper._identity_key_from_state(state)
 
                 if self.__should_log_debug:
-                    self.__log_debug("save_obj() table '%s' instance %s identity %s" % (table.name, mapperutil.instance_str(obj), str(instance_key)))
+                    self.__log_debug("save_obj() table '%s' instance %s identity %s" % (table.name, mapperutil.state_str(state), str(instance_key)))
 
-                isinsert = not instance_key in uowtransaction.uow.identity_map and not postupdate and not has_identity(obj)
+                isinsert = not instance_key in uowtransaction.uow.identity_map and not postupdate and not _state_has_identity(state)
                 params = {}
                 value_params = {}
                 hasdata = False
                         if col is mapper.version_id_col:
                             params[col.key] = 1
                         elif col in pks:
-                            value = mapper._get_attr_by_column(obj, col)
+                            value = mapper._get_state_attr_by_column(state, col)
                             if value is not None:
                                 params[col.key] = value
                         elif mapper.polymorphic_on is not None and mapper.polymorphic_on.shares_lineage(col):
                             if col.default is None or value is not None:
                                 params[col.key] = value
                         else:
-                            value = mapper._get_attr_by_column(obj, col)
+                            value = mapper._get_state_attr_by_column(state, col)
                             if col.default is None or value is not None:
                                 if isinstance(value, sql.ClauseElement):
                                     value_params[col] = value
                                 else:
                                     params[col.key] = value
-                    insert.append((obj, params, mapper, connection, value_params))
+                    insert.append((state, params, mapper, connection, value_params))
                 else:
                     for col in mapper._cols_by_table[table]:
                         if col is mapper.version_id_col:
-                            params[col._label] = mapper._get_attr_by_column(obj, col)
+                            params[col._label] = mapper._get_state_attr_by_column(state, col)
                             params[col.key] = params[col._label] + 1
                             for prop in mapper._columntoproperty.values():
-                                history = attributes.get_history(obj, prop.key, passive=True)
-                                if history and history.added_items():
+                                (added, unchanged, deleted) = attributes.get_history(state, prop.key, passive=True)
+                                if added:
                                     hasdata = True
                         elif col in pks:
-                            params[col._label] = mapper._get_attr_by_column(obj, col)
+                            params[col._label] = mapper._get_state_attr_by_column(state, col)
                         elif mapper.polymorphic_on is not None and mapper.polymorphic_on.shares_lineage(col):
                             pass
                         else:
                             if post_update_cols is not None and col not in post_update_cols:
                                 continue
                             prop = mapper._columntoproperty[col]
-                            history = attributes.get_history(obj, prop.key, passive=True)
-                            if history:
-                                a = history.added_items()
-                                if a:
-                                    if isinstance(a[0], sql.ClauseElement):
-                                        value_params[col] = a[0]
-                                    else:
-                                        params[col.key] = prop.get_col_value(col, a[0])
-                                    hasdata = True
+                            (added, unchanged, deleted) = attributes.get_history(state, prop.key, passive=True)
+                            if added:
+                                if isinstance(added[0], sql.ClauseElement):
+                                    value_params[col] = added[0]
+                                else:
+                                    params[col.key] = prop.get_col_value(col, added[0])
+                                hasdata = True
                     if hasdata:
-                        update.append((obj, params, mapper, connection, value_params))
+                        update.append((state, params, mapper, connection, value_params))
 
             if update:
                 mapper = table_to_mapper[table]
                     return 0
                 update.sort(comparator)
                 for rec in update:
-                    (obj, params, mapper, connection, value_params) = rec
+                    (state, params, mapper, connection, value_params) = rec
                     c = connection.execute(statement.values(value_params), params)
-                    mapper._postfetch(connection, table, obj, c, c.last_updated_params(), value_params)
+                    mapper._postfetch(connection, table, state, c, c.last_updated_params(), value_params)
 
                     # testlib.pragma exempt:__hash__
-                    updated_objects.add((id(obj), obj, connection))
+                    updated_objects.add((state, connection))
                     rows += c.rowcount
 
                 if c.supports_sane_rowcount() and rows != len(update):
             if insert:
                 statement = table.insert()
                 def comparator(a, b):
-                    return cmp(a[0]._sa_insert_order, b[0]._sa_insert_order)
+                    return cmp(a[0].insert_order, b[0].insert_order)
                 insert.sort(comparator)
                 for rec in insert:
-                    (obj, params, mapper, connection, value_params) = rec
+                    (state, params, mapper, connection, value_params) = rec
                     c = connection.execute(statement.values(value_params), params)
                     primary_key = c.last_inserted_ids()
 
                     if primary_key is not None:
                         i = 0
                         for col in mapper._pks_by_table[table]:
-                            if mapper._get_attr_by_column(obj, col) is None and len(primary_key) > i:
-                                mapper._set_attr_by_column(obj, col, primary_key[i])
+                            if mapper._get_state_attr_by_column(state, col) is None and len(primary_key) > i:
+                                mapper._set_state_attr_by_column(state, col, primary_key[i])
                             i+=1
-                    mapper._postfetch(connection, table, obj, c, c.last_inserted_params(), value_params)
+                    mapper._postfetch(connection, table, state, c, c.last_inserted_params(), value_params)
 
                     # synchronize newly inserted ids from one table to the next
                     # TODO: this fires off more than needed, try to organize syncrules
                     # per table
                     for m in util.reversed(list(mapper.iterate_to_root())):
                         if m._synchronizer is not None:
-                            m._synchronizer.execute(obj, obj)
+                            m._synchronizer.execute(state, state)
 
                     # testlib.pragma exempt:__hash__
-                    inserted_objects.add((id(obj), obj, connection))
+                    inserted_objects.add((state, connection))
 
         if not postupdate:
-            for id_, obj, connection in inserted_objects:
-                for mapper in object_mapper(obj).iterate_to_root():
+            for state, connection in inserted_objects:
+                for mapper in _state_mapper(state).iterate_to_root():
                     if 'after_insert' in mapper.extension.methods:
-                        mapper.extension.after_insert(mapper, connection, obj)
-            for id_, obj, connection in updated_objects:
-                for mapper in object_mapper(obj).iterate_to_root():
+                        mapper.extension.after_insert(mapper, connection, state.obj())
+            for state, connection in updated_objects:
+                for mapper in _state_mapper(state).iterate_to_root():
                     if 'after_update' in mapper.extension.methods:
-                        mapper.extension.after_update(mapper, connection, obj)
+                        mapper.extension.after_update(mapper, connection, state)
     
-    def _postfetch(self, connection, table, obj, resultproxy, params, value_params):
+    def _postfetch(self, connection, table, state, resultproxy, params, value_params):
         """After an ``INSERT`` or ``UPDATE``, assemble newly generated
         values on an instance.  For columns which are marked as being generated
         on the database side, set up a group-based "deferred" loader 
         which will populate those attributes in one query when next accessed.
         """
 
-        postfetch_cols = resultproxy.postfetch_cols().union(util.Set(value_params.keys())) 
+        postfetch_cols = util.Set(resultproxy.postfetch_cols()).union(util.Set(value_params.keys())) 
         deferred_props = []
 
         for c in self._cols_by_table[table]:
                 continue
             if c.primary_key or not c.key in params:
                 continue
-            if self._get_attr_by_column(obj, c) != params[c.key]:
-                self._set_attr_by_column(obj, c, params[c.key])
+            if self._get_state_attr_by_column(state, c) != params[c.key]:
+                self._set_state_attr_by_column(state, c, params[c.key])
         
         if deferred_props:
-            expire_instance(obj, deferred_props)
+            _expire_state(state, deferred_props)
 
-    def delete_obj(self, objects, uowtransaction):
+    def delete_obj(self, states, uowtransaction):
         """Issue ``DELETE`` statements for a list of objects.
 
         This is called within the context of a UOWTransaction during a
 
         if 'connection_callable' in uowtransaction.mapper_flush_opts:
             connection_callable = uowtransaction.mapper_flush_opts['connection_callable']
-            tups = [(obj, connection_callable(self, obj)) for obj in objects]
+            tups = [(state, connection_callable(self, state.obj())) for state in states]
         else:
             connection = uowtransaction.transaction.connection(self)
-            tups = [(obj, connection) for obj in objects]
+            tups = [(state, connection) for state in states]
 
-        for (obj, connection) in tups:
-            for mapper in object_mapper(obj).iterate_to_root():
+        for (state, connection) in tups:
+            for mapper in _state_mapper(state).iterate_to_root():
                 if 'before_delete' in mapper.extension.methods:
-                    mapper.extension.before_delete(mapper, connection, obj)
+                    mapper.extension.before_delete(mapper, connection, state.obj())
 
         deleted_objects = util.Set()
         table_to_mapper = {}
 
         for table in sqlutil.sort_tables(table_to_mapper.keys(), reverse=True):
             delete = {}
-            for (obj, connection) in tups:
-                mapper = object_mapper(obj)
+            for (state, connection) in tups:
+                mapper = _state_mapper(state)
                 if table not in mapper._pks_by_table:
                     continue
 
                 params = {}
-                if not hasattr(obj, '_instance_key'):
+                if not _state_has_identity(state):
                     continue
                 else:
                     delete.setdefault(connection, []).append(params)
                 for col in mapper._pks_by_table[table]:
-                    params[col.key] = mapper._get_attr_by_column(obj, col)
+                    params[col.key] = mapper._get_state_attr_by_column(state, col)
                 if mapper.version_id_col is not None and table.c.contains_column(mapper.version_id_col):
-                    params[mapper.version_id_col.key] = mapper._get_attr_by_column(obj, mapper.version_id_col)
+                    params[mapper.version_id_col.key] = mapper._get_state_attr_by_column(state, mapper.version_id_col)
                 # testlib.pragma exempt:__hash__
-                deleted_objects.add((id(obj), obj, connection))
+                deleted_objects.add((state, connection))
             for connection, del_objects in delete.iteritems():
                 mapper = table_to_mapper[table]
                 def comparator(a, b):
                 if c.supports_sane_multi_rowcount() and c.rowcount != len(del_objects):
                     raise exceptions.ConcurrentModificationError("Deleted rowcount %d does not match number of objects deleted %d" % (c.rowcount, len(del_objects)))
 
-        for id_, obj, connection in deleted_objects:
-            for mapper in object_mapper(obj).iterate_to_root():
+        for state, connection in deleted_objects:
+            for mapper in _state_mapper(state).iterate_to_root():
                 if 'after_delete' in mapper.extension.methods:
-                    mapper.extension.after_delete(mapper, connection, obj)
+                    mapper.extension.after_delete(mapper, connection, state.obj())
 
     def register_dependencies(self, uowcommit, *args, **kwargs):
         """Register ``DependencyProcessor`` instances with a
         for prop in self.__props.values():
             prop.register_dependencies(uowcommit, *args, **kwargs)
 
-    def cascade_iterator(self, type, object, recursive=None, halt_on=None):
+    def cascade_iterator(self, type, state, recursive=None, halt_on=None):
         """Iterate each element and its mapper in an object graph, 
         for all relations that meet the given cascade rule.
 
           The name of the cascade rule (i.e. save-update, delete,
           etc.)
 
-        object
-          The lead object instance.  child items will be processed per
+        state
+          The lead InstanceState.  Child items will be processed per
           the relations defined for this object's mapper.
 
         recursive
           Used by the function for internal context during recursive
           calls, leave as None.
+        
+        The returned values are object instances; this provides a strong
+        reference so that they don't fall out of scope immediately.
         """
 
         if recursive is None:
             recursive=util.IdentitySet()
         for prop in self.__props.values():
-            for (c, m) in prop.cascade_iterator(type, object, recursive, halt_on=halt_on):
+            for (c, m) in prop.cascade_iterator(type, state, recursive, halt_on=halt_on):
                 yield (c, m)
 
     def get_select_mapper(self):
 def has_identity(object):
     return hasattr(object, '_instance_key')
 
+def _state_has_identity(state):
+    return '_instance_key' in state.dict
+    
 def has_mapper(object):
     """Return True if the given object has had a mapper association
     set up, either through loading, or via insertion in a session.
 
     return hasattr(object, '_entity_name')
 
+def _state_mapper(state):
+    return state.class_._class_state.mappers[state.dict.get('_entity_name', None)]
+
 def object_mapper(object, entity_name=None, raiseerror=True):
     """Given an object, return the primary Mapper associated with the object instance.
     

File lib/sqlalchemy/orm/properties.py

         
     def copy(self):
         return ColumnProperty(deferred=self.deferred, group=self.group, *self.columns)
+    
+    def getattr(self, state, column):
+        return getattr(state.class_, self.key).impl.get(state)
+
+    def setattr(self, state, value, column):
+        getattr(state.class_, self.key).impl.set(state, value, None)
         
-    def getattr(self, object, column):
-        return getattr(object, self.key)
-
-    def setattr(self, object, value, column):
-        setattr(object, self.key, value)
-
     def merge(self, session, source, dest, dont_load, _recursive):
         setattr(dest, self.key, getattr(source, self.key, None))
 
     def copy(self):
         return CompositeProperty(deferred=self.deferred, group=self.group, composite_class=self.composite_class, *self.columns)
 
-    def getattr(self, object, column):
-        obj = getattr(object, self.key)
+    def getattr(self, state, column):
+        obj = getattr(state.class_, self.key).impl.get(state)
         return self.get_col_value(column, obj)
 
-    def setattr(self, object, value, column):
-        obj = getattr(object, self.key, None)
+    def setattr(self, state, value, column):
+        # TODO: test coverage for this method
+        obj = getattr(state.class_, self.key).impl.get(state)
         if obj is None:
             obj = self.composite_class(*[None for c in self.columns])
+            getattr(state.class_, self.key).impl.set(state, obj, None)
+            
         for a, b in zip(self.columns, value.__composite_values__()):
             if a is column:
                 setattr(obj, b, value)
-
+        
     def get_col_value(self, column, value):
         for a, b in zip(self.columns, value.__composite_values__()):
             if a is column:
     def merge(self, session, source, dest, dont_load, _recursive):
         if not "merge" in self.cascade:
             return
-        childlist = attributes.get_history(source, self.key, passive=True)
-        if childlist is None:
+        instances = attributes.get_as_list(source._state, self.key, passive=True)
+        if not instances:
             return
         if self.uselist:
             # sets a blank collection according to the correct list class
             dest_list = attributes.init_collection(dest, self.key)
-            for current in list(childlist):
+            for current in instances:
                 obj = session.merge(current, entity_name=self.mapper.entity_name, dont_load=dont_load, _recursive=_recursive)
                 if obj is not None:
                     if dont_load:
                     else:
                         dest_list.append_with_event(obj)
         else:
-            current = list(childlist)[0]
+            current = instances[0]
             if current is not None:
                 obj = session.merge(current, entity_name=self.mapper.entity_name, dont_load=dont_load, _recursive=_recursive)
                 if obj is not None:
                     else:
                         setattr(dest, self.key, obj)
 
-    def cascade_iterator(self, type, object, recursive, halt_on=None):
+    def cascade_iterator(self, type, state, recursive, halt_on=None):
         if not type in self.cascade:
             return
         passive = type != 'delete' or self.passive_deletes
         mapper = self.mapper.primary_mapper()
-        for c in attributes.get_as_list(object, self.key, passive=passive):
-            if c is not None and c not in recursive and (halt_on is None or not halt_on(c)):
-                if not isinstance(c, self.mapper.class_):
-                    raise exceptions.AssertionError("Attribute '%s' on class '%s' doesn't handle objects of type '%s'" % (self.key, str(self.parent.class_), str(c.__class__)))
-                recursive.add(c)
-                yield (c, mapper)
-                for (c2, m) in mapper.cascade_iterator(type, c, recursive):
-                    yield (c2, m)
+        instances = attributes.get_as_list(state, self.key, passive=passive)
+        if instances:
+            for c in instances:
+                if c is not None and c not in recursive and (halt_on is None or not halt_on(c)):
+                    if not isinstance(c, self.mapper.class_):
+                        raise exceptions.AssertionError("Attribute '%s' on class '%s' doesn't handle objects of type '%s'" % (self.key, str(self.parent.class_), str(c.__class__)))
+                    recursive.add(c)
+                    yield (c, mapper)
+                    for (c2, m) in mapper.cascade_iterator(type, c._state, recursive):
+                        yield (c2, m)
 
     def _get_target_class(self):
         """Return the target class of the relation, even if the

File lib/sqlalchemy/orm/session.py

         
         if attribute_names:
             self._validate_persistent(instance)
-            expire_instance(instance, attribute_names=attribute_names)
+            _expire_state(instance._state, attribute_names=attribute_names)
         else:
             # pre-fetch the full cascade since the expire is going to 
             # remove associations
             cascaded = list(_cascade_iterator('refresh-expire', instance))
             self._validate_persistent(instance)
-            expire_instance(instance, None)
+            _expire_state(instance._state, None)
             for (c, m) in cascaded:
                 self._validate_persistent(c)
-                expire_instance(c, None)
+                _expire_state(c._state, None)
 
     def prune(self):
         """Removes unreferenced instances cached in the identity map.
         self._validate_persistent(instance)
         for c, m in [(instance, None)] + list(_cascade_iterator('expunge', instance)):
             if c in self:
-                self.uow._remove_deleted(c)
+                self.uow._remove_deleted(c._state)
                 self._unattach(c)
 
     def save(self, instance, entity_name=None):
         The `entity_name` keyword argument will further qualify the
         specific ``Mapper`` used to handle this instance.
         """
-
         self._save_impl(instance, entity_name=entity_name)
         self._cascade_save_or_update(instance)
 
         result of True.
         """
         
-        return instance in self.uow.new or (hasattr(instance, '_instance_key') and self.identity_map.get(instance._instance_key) is instance)
+        return instance._state in self.uow.new or (hasattr(instance, '_instance_key') and self.identity_map.get(instance._instance_key) is instance)
 
     def __iter__(self):
         """return an iterator of all instances which are pending or persistent within this Session."""
         
-        return iter(list(self.uow.new) + self.uow.identity_map.values())
+        return iter(list(self.uow.new.values()) + self.uow.identity_map.values())
 
     def is_modified(self, instance, include_collections=True, passive=False):
         """return True if the given instance has modified attributes.
         for attr in attributes._managed_attributes(instance.__class__):
             if not include_collections and hasattr(attr.impl, 'get_collection'):
                 continue
-            if attr.get_history(instance).is_modified():
+            (added, unchanged, deleted) = attr.get_history(instance)
+            if added or deleted:
                 return True
         return False
         
                      is_modified() method.
                      """)
 
-    deleted = property(lambda s:s.uow.deleted,
+    deleted = property(lambda s:util.IdentitySet(s.uow.deleted.values()),
                        doc="A ``Set`` of all instances marked as 'deleted' within this ``Session``")
 
-    new = property(lambda s:s.uow.new,
+    new = property(lambda s:util.IdentitySet(s.uow.new.values()),
                    doc="A ``Set`` of all instances marked as 'new' within this ``Session``.")
 
-def expire_instance(instance, attribute_names):
+def _expire_state(state, attribute_names):
     """standalone expire instance function. 
     
     installs a callable with the given instance's _state
     If the list is None or blank, the entire instance is expired.
     """
     
-    if instance._state.trigger is None:
+    if state.trigger is None:
         def load_attributes(instance, attribute_names):
             if object_session(instance).query(instance.__class__)._get(instance._instance_key, refresh_instance=instance, only_load_props=attribute_names) is None:
                 raise exceptions.InvalidRequestError("Could not refresh instance '%s'" % mapperutil.instance_str(instance))
-        instance._state.trigger = load_attributes
+        state.trigger = load_attributes
         
-    instance._state.expire_attributes(attribute_names)
+    state.expire_attributes(attribute_names)
     
 register_attribute = unitofwork.register_attribute
 
 
 def _cascade_iterator(cascade, instance, **kwargs):
     mapper = _object_mapper(instance)
-    for (o, m) in mapper.cascade_iterator(cascade, instance, **kwargs):
+    for (o, m) in mapper.cascade_iterator(cascade, instance._state, **kwargs):
         yield o, m
 
 def object_session(instance):
 # Lazy initialization to avoid circular imports
 unitofwork.object_session = object_session
 from sqlalchemy.orm import mapper
-mapper.expire_instance = expire_instance
+mapper._expire_state = _expire_state

File lib/sqlalchemy/orm/sync.py

                     source_column = binary.right
             else:
                 if binary.left in foreign_keys:
-                    source_column=binary.right
+                    source_column = binary.right
                     dest_column = binary.left
                 elif binary.right in foreign_keys:
                     source_column = binary.left
     """An instruction indicating how to populate the objects on each
     side of a relationship.
 
-    In other words, if table1 column A is joined against table2 column
+    E.g. if table1 column A is joined against table2 column
     B, and we are a one-to-many from table1 to table2, a syncrule
     would say *take the A attribute from object1 and assign it to the
     B attribute on object2*.
-
-    A rule contains the source mapper, the source column, destination
-    column, destination mapper in the case of a one/many relationship,
-    and the integer direction of this mapper relative to the
-    association in the case of a many to many relationship.
     """
 
     def __init__(self, source_mapper, source_column, dest_column, dest_mapper=None, issecondary=None):
             self._dest_primary_key = self.dest_mapper is not None and self.dest_column in self.dest_mapper._pks_by_table[self.dest_column.table] and not self.dest_mapper.allow_null_pks
             return self._dest_primary_key
 
-    def execute(self, source, dest, obj, child, clearkeys):
+    def execute(self, source, dest, parent, child, clearkeys):
         if source is None:
             if self.issecondary is False:
-                source = obj
+                source = parent
             elif self.issecondary is True:
                 source = child
         if clearkeys or source is None:
             value = None
             clearkeys = True
         else:
-            value = self.source_mapper._get_attr_by_column(source, self.source_column)
+            value = self.source_mapper._get_state_attr_by_column(source, self.source_column)
         if isinstance(dest, dict):
             dest[self.dest_column.key] = value
         else:
             if clearkeys and self.dest_primary_key():
-                raise exceptions.AssertionError("Dependency rule tried to blank-out primary key column '%s' on instance '%s'" % (str(self.dest_column), mapperutil.instance_str(dest)))
+                raise exceptions.AssertionError("Dependency rule tried to blank-out primary key column '%s' on instance '%s'" % (str(self.dest_column), mapperutil.state_str(dest)))
 
             if logging.is_debug_enabled(self.logger):
-                self.logger.debug("execute() instances: %s(%s)->%s(%s) ('%s')" % (mapperutil.instance_str(source), str(self.source_column), mapperutil.instance_str(dest), str(self.dest_column), value))
-            self.dest_mapper._set_attr_by_column(dest, self.dest_column, value)
+                self.logger.debug("execute() instances: %s(%s)->%s(%s) ('%s')" % (mapperutil.state_str(source), str(self.source_column), mapperutil.state_str(dest), str(self.dest_column), value))
+            self.dest_mapper._set_state_attr_by_column(dest, self.dest_column, value)
 
 SyncRule.logger = logging.class_logger(SyncRule)
 

File lib/sqlalchemy/orm/unitofwork.py

 changes at once.
 """
 
-import gc, StringIO, weakref
+import StringIO, weakref
 from sqlalchemy import util, logging, topological, exceptions
 from sqlalchemy.orm import attributes, interfaces
 from sqlalchemy.orm import util as mapperutil
-from sqlalchemy.orm.mapper import object_mapper
+from sqlalchemy.orm.mapper import object_mapper, _state_mapper
 
 # Load lazily
 object_session = None
 
 class UOWEventHandler(interfaces.AttributeExtension):
-    """An event handler added to all class attributes which handles
-    session operations.
+    """An event handler added to all relation attributes which handles
+    session cascade operations.
     """
 
     def __init__(self, key, class_, cascade=None):
                 sess.save_or_update(newvalue, entity_name=ename)
 
 def register_attribute(class_, key, *args, **kwargs):
+    """overrides attributes.register_attribute() to add UOW event handlers
+    to new InstrumentedAttributes.
+    """
+    
     cascade = kwargs.pop('cascade', None)
-    extension = util.to_list(kwargs.pop('extension', None) or [])
-    extension.insert(0, UOWEventHandler(key, class_, cascade=cascade))
-    kwargs['extension'] = extension
+    useobject = kwargs.get('useobject', False)
+    if useobject:
+        # for object-holding attributes, instrument UOWEventHandler
+        # to process per-attribute cascades
+        extension = util.to_list(kwargs.pop('extension', None) or [])
+        extension.insert(0, UOWEventHandler(key, class_, cascade=cascade))
+        kwargs['extension'] = extension
     return attributes.register_attribute(class_, key, *args, **kwargs)
     
 
         else:
             self.identity_map = attributes.StrongInstanceDict()
 
-        self.new = util.IdentitySet() #OrderedSet()
-        self.deleted = util.IdentitySet()
+        self.new = {}   # InstanceState->object, strong refs object
+        self.deleted = {}  # same
         self.logger = logging.instance_logger(self, echoflag=session.echo_uow)
 
-    def _remove_deleted(self, obj):
-        if hasattr(obj, "_instance_key"):
-            del self.identity_map[obj._instance_key]
-        try:
-            self.deleted.remove(obj)
-        except KeyError:
-            pass
-        try:
-            self.new.remove(obj)
-        except KeyError:
-            pass
+    def _remove_deleted(self, state):
+        if '_instance_key' in state.dict:
+            del self.identity_map[state.dict['_instance_key']]
+        self.deleted.pop(state, None)
+        self.new.pop(state, None)
 
-    def _is_valid(self, obj):
-        if (hasattr(obj, '_instance_key') and obj._instance_key not in self.identity_map) or \
-            (not hasattr(obj, '_instance_key') and obj not in self.new):
-            return False
+    def _is_valid(self, state):
+        if '_instance_key' in state.dict:
+            return state.dict['_instance_key'] in self.identity_map
         else:
-            return True
+            return state in self.new
 
-    def _register_clean(self, obj):
+    def _register_clean(self, state):
         """register the given object as 'clean' (i.e. persistent) within this unit of work, after
         a save operation has taken place."""
-        
-        if obj in self.new:
-            self.new.remove(obj)
-        if not hasattr(obj, '_instance_key'):
-            mapper = object_mapper(obj)
-            obj._instance_key = mapper.identity_key_from_instance(obj)
-        if hasattr(obj, '_sa_insert_order'):
-            delattr(obj, '_sa_insert_order')
-        self.identity_map[obj._instance_key] = obj
-        obj._state.commit_all()
+
+        if '_instance_key' not in state.dict:
+            mapper = _state_mapper(state)
+            state.dict['_instance_key'] = mapper._identity_key_from_state(state)
+        if hasattr(state, 'insert_order'):
+            delattr(state, 'insert_order')
+        self.identity_map[state.dict['_instance_key']] = state.obj()
+        state.commit_all()
+        # remove from new last, might be the last strong ref
+        self.new.pop(state, None)
 
     def register_new(self, obj):
         """register the given object as 'new' (i.e. unsaved) within this unit of work."""
 
         if hasattr(obj, '_instance_key'):
             raise exceptions.InvalidRequestError("Object '%s' already has an identity - it can't be registered as new" % repr(obj))
-        if obj not in self.new:
-            self.new.add(obj)
-            obj._sa_insert_order = len(self.new)
+        if obj._state not in self.new:
+            self.new[obj._state] = obj
+            obj._state.insert_order = len(self.new)
 
     def register_deleted(self, obj):
         """register the given persistent object as 'to be deleted' within this unit of work."""
         
-        self.deleted.add(obj)
+        self.deleted[obj._state] = obj
 
     def locate_dirty(self):
         """return a set of all persistent instances within this unit of work which 
         
         # a little bit of inlining for speed
         return util.IdentitySet([x for x in self.identity_map.values() 
-            if x not in self.deleted 
+            if x._state not in self.deleted 
             and (
                 x._state.modified
                 or (x.__class__._class_state.has_mutable_scalars and x.state.is_modified())
             )
             ])
 
-        
     def flush(self, session, objects=None):
         """create a dependency tree of all pending SQL operations within this unit of work and execute."""
         
             or (x.class_._class_state.has_mutable_scalars and x.is_modified())
         ]
         
-        if len(dirty) == 0 and len(self.deleted) == 0 and len(self.new) == 0:
+        if not dirty and not self.deleted and not self.new:
             return
-            
-        dirty = util.IdentitySet([x.obj() for x in dirty]).difference(self.deleted)
+        
+        deleted = util.Set(self.deleted)
+        new = util.Set(self.new)
+        
+        dirty = util.Set(dirty).difference(deleted)
         
         flush_context = UOWTransaction(self, session)
 
             session.extension.before_flush(session, flush_context, objects)
 
         # create the set of all objects we want to operate upon
-        if objects is not None:
+        if objects:
             # specific list passed in
-            objset = util.IdentitySet(objects)
+            objset = util.Set([o._state for o in objects])
         else:
             # or just everything
-            objset = util.IdentitySet(self.identity_map.values()).union(self.new)
+            objset = util.Set(self.identity_map.all_states()).union(new)
             
         # store objects whose fate has been decided
-        processed = util.IdentitySet()
+        processed = util.Set()
 
         # put all saves/updates into the flush context.  detect top-level orphans and throw them into deleted.
-        for obj in self.new.union(dirty).intersection(objset).difference(self.deleted):
-            if obj in processed:
+        for state in new.union(dirty).intersection(objset).difference(deleted):
+            if state in processed:
                 continue
 
-            flush_context.register_object(obj, isdelete=object_mapper(obj)._is_orphan(obj))
-            processed.add(obj)
+            flush_context.register_object(state, isdelete=_state_mapper(state)._is_orphan(state.obj()))
+            processed.add(state)
 
         # put all remaining deletes into the flush context.
-        for obj in self.deleted.intersection(objset).difference(processed):
-            flush_context.register_object(obj, isdelete=True)
+        for state in deleted.intersection(objset).difference(processed):
+            flush_context.register_object(state, isdelete=True)
 
         if len(flush_context.tasks) == 0:
             return
         dirty = self.locate_dirty()
         keepers = weakref.WeakValueDictionary(self.identity_map)
         self.identity_map.clear()
-        gc.collect()
         self.identity_map.update(keepers)
         return ref_count - len(self.identity_map)
 
 
         self.logger = logging.instance_logger(self, echoflag=session.echo_uow)
         
-    def register_object(self, obj, isdelete = False, listonly = False, postupdate=False, post_update_cols=None, **kwargs):
-        """Add an object to this ``UOWTransaction`` to be updated in the database.
-
-        This operation has the combined effect of locating/creating an appropriate
-        ``UOWTask`` object, and calling its ``append()`` method which then locates/creates
-        an appropriate ``UOWTaskElement`` object.
-        """
-
-        #print "REGISTER", repr(obj), repr(getattr(obj, '_instance_key', None)), str(isdelete), str(listonly)
-
+    def register_object(self, state, isdelete = False, listonly = False, postupdate=False, post_update_cols=None, **kwargs):
         # if object is not in the overall session, do nothing
-        if not self.uow._is_valid(obj):
+        if not self.uow._is_valid(state):
             if self._should_log_debug:
-                self.logger.debug("object %s not part of session, not registering for flush" % (mapperutil.instance_str(obj)))
+                self.logger.debug("object %s not part of session, not registering for flush" % (mapperutil.state_str(state)))
             return
 
         if self._should_log_debug:
-            self.logger.debug("register object for flush: %s isdelete=%s listonly=%s postupdate=%s" % (mapperutil.instance_str(obj), isdelete, listonly, postupdate))
+            self.logger.debug("register object for flush: %s isdelete=%s listonly=%s postupdate=%s" % (mapperutil.state_str(state), isdelete, listonly, postupdate))
 
-        mapper = object_mapper(obj)
+        mapper = _state_mapper(state)
+        
         task = self.get_task_by_mapper(mapper)
         if postupdate:
-            task.append_postupdate(obj, post_update_cols)
-            return
-
-        task.append(obj, listonly, isdelete=isdelete, **kwargs)
+            task.append_postupdate(state, post_update_cols)
+        else:
+            task.append(state, listonly, isdelete=isdelete, **kwargs)
 
     def set_row_switch(self, obj):
         """mark a deleted object as a 'row switch'.
         """
         mapper = object_mapper(obj)
         task = self.get_task_by_mapper(mapper)
-        taskelement = task._objects[id(obj)]
+        taskelement = task._objects[obj._state]
         taskelement.isdelete = "rowswitch"
         
     def unregister_object(self, obj):
         no further operations occur upon the instance."""
         mapper = object_mapper(obj)
         task = self.get_task_by_mapper(mapper)
-        if id(obj) in task._objects:
-            task.delete(obj)
+        if obj._state in task._objects:
+            task.delete(obj._state)
 
     def is_deleted(self, obj):
         """return true if the given object is marked as deleted within this UOWTransaction."""
         
         mapper = object_mapper(obj)
         task = self.get_task_by_mapper(mapper)
-        return task.is_deleted(obj)
+        return task.is_deleted(obj._state)
 
+    def state_is_deleted(self, state):
+        mapper = _state_mapper(state)
+        task = self.get_task_by_mapper(mapper)
+        return task.is_deleted(state)
+        
     def get_task_by_mapper(self, mapper, dontcreate=False):
         """return UOWTask element corresponding to the given mapper.
 
             if base_mapper in self.tasks:
                 base_task = self.tasks[base_mapper]
             else:
-                base_task = UOWTask(self, base_mapper)
-                self.tasks[base_mapper] = base_task
+                self.tasks[base_mapper] = base_task = UOWTask(self, base_mapper)
                 base_mapper.register_dependencies(self)
 
             if mapper not in self.tasks:
-                task = UOWTask(self, mapper, base_task=base_task)
-                self.tasks[mapper] = task
+                self.tasks[mapper] = task = UOWTask(self, mapper, base_task=base_task)
                 mapper.register_dependencies(self)
             else:
                 task = self.tasks[mapper]
         by another.        
         """
 
-        # correct for primary mapper (the mapper offcially associated with the class)
+        # correct for primary mapper
         # also convert to the "base mapper", the parentmost task at the top of an inheritance chain
         # dependency sorting is done via non-inheriting mappers only, dependencies between mappers
         # in the same inheritance chain is done at the per-object level
         self.dependencies.add((mapper, dependency))
 
     def register_processor(self, mapper, processor, mapperfrom):
-        """register a dependency processor object, corresponding to dependencies between
+        """register a dependency processor, corresponding to dependencies between
         the two given mappers.
         
-        In reality, the processor is an instance of ``dependency.DependencyProcessor``
-        and is registered as a result of the ``mapper.register_dependencies()`` call in
-        ``get_task_by_mapper()``.
-        
-        The dependency processor supports the methods ``preprocess_dependencies()`` and
-        ``process_dependencies()``, which
-        perform operations on a list of instances that have a dependency relationship
-        with some other instance.  The operations include adding items to the UOW
-        corresponding to some cascade operations, issuing inserts/deletes on 
-        association tables, and synchronzing foreign key values between related objects
-        before the dependent object is operated upon at the SQL level.
         """
 
-        # when the task from "mapper" executes, take the objects from the task corresponding
-        # to "mapperfrom"'s list of save/delete objects, and send them to "processor"
-        # for dependency processing
-
-        #print "registerprocessor", str(mapper), repr(processor), repr(processor.key), str(mapperfrom)
-
-        # correct for primary mapper (the mapper offcially associated with the class)
+        # correct for primary mapper
         mapper = mapper.primary_mapper()
         mapperfrom = mapperfrom.primary_mapper()
 
     def execute(self):
         """Execute this UOWTransaction.
         
-        This will organize all collected UOWTasks into a toplogically-sorted
-        dependency tree, which is then traversed using the traversal scheme
+        This will organize all collected UOWTasks into a dependency-sorted
+        list which is then traversed using the traversal scheme
         encoded in the UOWExecutor class.  Operations to mappers and dependency
         processors are fired off in order to issue SQL to the database and 
-        to maintain instance state during the execution."""
+        synchronize instance attributes with database values and related
+        foreign key values."""
 
         # pre-execute dependency processors.  this process may
         # result in new tasks, objects and/or dependency processors being added,
             if not ret:
                 break
 
-        head = self._sort_dependencies()
+        tasks = self._sort_dependencies()
         if self._should_log_info:
-            if head is None:
-                self.logger.info("Task dump: None")
-            else:
-                self.logger.info("Task dump:\n" + head.dump())
-        if head is not None:
-            UOWExecutor().execute(self, head)
+            self.logger.info("Task dump:\n" + self._dump(tasks))
+        UOWExecutor().execute(self, tasks)
         if self._should_log_info:
             self.logger.info("Execute Complete")
 
+    def _dump(self, tasks):
+        buf = StringIO.StringIO()
+        import uowdumper
+        uowdumper.UOWDumper(tasks, buf)
+        return buf.getvalue()
+        
     def post_exec(self):
         """mark processed objects as clean / deleted after a successful flush().
         
 
         for task in self.tasks.values():
             for elem in task.elements:
-                if elem.obj is None:
+                if elem.state is None:
                     continue
                 if elem.isdelete:
-                    self.uow._remove_deleted(elem.obj)
+                    self.uow._remove_deleted(elem.state)
                 else:
-                    self.uow._register_clean(elem.obj)
+                    self.uow._register_clean(elem.state)
 
     def _sort_dependencies(self):
-        """Create a hierarchical tree of dependent UOWTask instances.
+        nodes = topological.sort_with_cycles(self.dependencies, 
+            [t.mapper for t in self.tasks.values() if t.base_task is t]
+        )
 
-        The root UOWTask is returned.  
-        
-        Cyclical relationships
-        within the toplogical sort are further broken down into new
-        temporary UOWTask insances which represent smaller sub-groups of objects
-        that would normally belong to a single UOWTask.
+        ret = []
+        for item, cycles in nodes:
+            task = self.get_task_by_mapper(item)
+            if cycles:
+                for t in task._sort_circular_dependencies(self, [self.get_task_by_mapper(i) for i in cycles]):
+                    ret.append(t)
+            else:
+                ret.append(task)
 
-        """
-
-        def sort_hier(node):
-            if node is None:
-                return None
-            task = self.get_task_by_mapper(node.item)
-            if node.cycles is not None: