Commits

Mike Bayer committed f225f9d

- move all of orm to use absolute imports
- break out key mechanics of loading objects
into a new "orm.loading" module, removing implementation
details from both mapper.py and query.py; this is
analogous to persistence.py
- some other cleanup and old cruft removal

  • Participants
  • Parent commits df96868

Comments (0)

Files changed (29)

File lib/sqlalchemy/orm/__init__.py

 
 """
 
-from sqlalchemy.orm import exc
-from sqlalchemy.orm.mapper import (
+from . import exc
+from .mapper import (
      Mapper,
      _mapper_registry,
      class_mapper,
-     configure_mappers
+     configure_mappers,
+     reconstructor,
+     validates
      )
-from sqlalchemy.orm.interfaces import (
+from .interfaces import (
      EXT_CONTINUE,
      EXT_STOP,
      InstrumentationManager,
      SessionExtension,
      AttributeExtension,
      )
-from sqlalchemy.orm.util import (
+from .util import (
      aliased,
      join,
      object_mapper,
      with_parent,
      with_polymorphic,
      )
-from sqlalchemy.orm.properties import (
+from .properties import (
      ColumnProperty,
      ComparableProperty,
      CompositeProperty,
      PropertyLoader,
      SynonymProperty,
      )
-from sqlalchemy.orm.relationships import (
+from .relationships import (
     foreign,
     remote,
     remote_foreign
 )
-from sqlalchemy.orm import mapper as mapperlib
-from sqlalchemy.orm.mapper import reconstructor, validates
-from sqlalchemy.orm import strategies
-from sqlalchemy.orm.query import AliasOption, Query
-from sqlalchemy.sql import util as sql_util
-from sqlalchemy.orm.session import Session
-from sqlalchemy.orm.session import object_session, sessionmaker, \
+from .session import (
+    Session, 
+    object_session, 
+    sessionmaker, 
     make_transient
-from sqlalchemy.orm.scoping import ScopedSession
-from sqlalchemy import util as sa_util
+)
+from .scoping import (
+    ScopedSession
+)
+from . import mapper as mapperlib
+from . import strategies
+from .query import AliasOption, Query
+from ..sql import util as sql_util
+from .. import util as sa_util
 
 __all__ = (
     'EXT_CONTINUE',
     """
     alias = kwargs.pop('alias', None)
     if kwargs:
-        raise exceptions.ArgumentError('Invalid kwargs for contains_eag'
-                'er: %r' % kwargs.keys())
+        raise exc.ArgumentError(
+                'Invalid kwargs for contains_eager: %r' % kwargs.keys())
     return strategies.EagerLazyOption(keys, lazy='joined',
             propagate_to_loaders=False, chained=True), \
         strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True)

File lib/sqlalchemy/orm/attributes.py

 import operator
 from operator import itemgetter
 
-from sqlalchemy import util, event, exc as sa_exc, inspection
-from sqlalchemy.orm import interfaces, collections, events, exc as orm_exc
-
-
-mapperutil = util.importlater("sqlalchemy.orm", "util")
+from .. import util, event, inspection
+from . import interfaces, collections, events, exc as orm_exc
+orm_util = util.importlater("sqlalchemy.orm", "util")
 
 PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
 """Symbol returned by a loader callable or other attribute/history
                             "but the parent record "
                             "has gone stale, can't be sure this "
                             "is the most recent parent." % 
-                            (mapperutil.state_str(state), 
-                            mapperutil.state_str(parent_state),
+                            (orm_util.state_str(state), 
+                            orm_util.state_str(parent_state),
                             self.key))
 
                     return
             else:
                 raise ValueError(
                     "Object %s not associated with %s on attribute '%s'" % (
-                    mapperutil.instance_str(check_old),
-                   mapperutil.state_str(state),
+                    orm_util.instance_str(check_old),
+                   orm_util.state_str(state),
                    self.key
                 ))
         value = self.fire_replace_event(state, dict_, value, old, initiator)

File lib/sqlalchemy/orm/collections.py

 import sys
 import weakref
 
-from sqlalchemy.sql import expression
-from sqlalchemy import schema, util, exc as sa_exc
+from ..sql import expression
+from .. import util, exc as sa_exc
+orm_util = util.importlater("sqlalchemy.orm", "util")
+attributes = util.importlater("sqlalchemy.orm", "attributes")
+
 
 __all__ = ['collection', 'collection_adapter',
            'mapped_collection', 'column_mapped_collection',
         return self.cols
 
     def __call__(self, value):
-        state = instance_state(value)
-        m = _state_mapper(state)
+        state = attributes.instance_state(value)
+        m = orm_util._state_mapper(state)
 
         key = [
             m._get_state_attr_by_column(state, state.dict, col)
     def __reduce__(self):
         return _SerializableColumnGetter, (self.colkeys,)
     def __call__(self, value):
-        state = instance_state(value)
-        m = _state_mapper(state)
+        state = attributes.instance_state(value)
+        m = orm_util._state_mapper(state)
         key = [m._get_state_attr_by_column(
                         state, state.dict, 
                         m.mapped_table.columns[k])
     after a session flush.
 
     """
-    global _state_mapper, instance_state
-    from sqlalchemy.orm.util import _state_mapper
-    from sqlalchemy.orm.attributes import instance_state
-
     cols = [expression._only_column_elements(q, "mapping_spec")
                 for q in util.to_list(mapping_spec)
             ]

File lib/sqlalchemy/orm/dependency.py

 
 """
 
-from sqlalchemy import sql, util, exc as sa_exc
-from sqlalchemy.orm import attributes, exc, sync, unitofwork, \
+from .. import sql, util, exc as sa_exc
+from . import attributes, exc, sync, unitofwork, \
                                         util as mapperutil
-from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
+from .interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
 
 class DependencyProcessor(object):
     def __init__(self, prop):
                                         self.key, 
                                         self._passive_delete_flag)
                 if history:
-                    ret = True
                     for child in history.deleted:
                         if self.hasparent(child) is False:
                             uowcommit.register_object(child, isdelete=True, 
             ])
 
     def presort_deletes(self, uowcommit, states):
+        # TODO: no tests fail if this whole
+        # thing is removed !!!!
         if not self.passive_deletes:
             # if no passive deletes, load history on 
             # the collection, so that prop_has_changes()
             # returns True
             for state in states:
-                history = uowcommit.get_attribute_history(
+                uowcommit.get_attribute_history(
                                         state, 
                                         self.key, 
                                         self._passive_delete_flag)

File lib/sqlalchemy/orm/deprecated_interfaces.py

 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-from sqlalchemy import event, util
+from .. import event, util
 from interfaces import EXT_CONTINUE
 
 

File lib/sqlalchemy/orm/descriptor_props.py

 
 """
 
-from sqlalchemy.orm.interfaces import \
-    MapperProperty, PropComparator, StrategizedProperty
-from sqlalchemy.orm.mapper import _none_set
-from sqlalchemy.orm import attributes, strategies
-from sqlalchemy import util, sql, exc as sa_exc, event, schema
-from sqlalchemy.sql import expression
+from .interfaces import MapperProperty, PropComparator
+from .util import _none_set
+from . import attributes, strategies
+from .. import util, sql, exc as sa_exc, event, schema
+from ..sql import expression
 properties = util.importlater('sqlalchemy.orm', 'properties')
 
 class DescriptorProperty(MapperProperty):

File lib/sqlalchemy/orm/dynamic.py

 
 """
 
-from sqlalchemy import log, util
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.sql import operators
-from sqlalchemy.orm import (
-    attributes, object_session, util as mapperutil, strategies, object_mapper
+from .. import log, util
+from ..sql import operators
+from . import (
+    attributes, object_session, util as orm_util, strategies, 
+    object_mapper, exc as orm_exc, collections
     )
-from sqlalchemy.orm.query import Query
-from sqlalchemy.orm.util import has_identity
-from sqlalchemy.orm import attributes, collections
+from .query import Query
 
 class DynaLoader(strategies.AbstractRelationshipLoader):
     def init_class_attribute(self, mapper):
     query_class = None
 
     def __init__(self, attr, state):
-        Query.__init__(self, attr.target_mapper, None)
+        super(AppenderMixin, self).__init__(attr.target_mapper, None)
         self.instance = instance = state.obj()
         self.attr = attr
 
         if sess is not None and self.autoflush and sess.autoflush \
             and self.instance in sess:
             sess.flush()
-        if not has_identity(self.instance):
+        if not orm_util.has_identity(self.instance):
             return None
         else:
             return sess
                     "Parent instance %s is not bound to a Session, and no "
                     "contextual session is established; lazy load operation "
                     "of attribute '%s' cannot proceed" % (
-                        mapperutil.instance_str(instance), self.attr.key))
+                        orm_util.instance_str(instance), self.attr.key))
 
         if self.query_class:
             query = self.query_class(self.attr.target_mapper, session=sess)

File lib/sqlalchemy/orm/evaluator.py

 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 import operator
-from sqlalchemy.sql import operators, functions
-from sqlalchemy.sql import expression as sql
-
+from ..sql import operators
 
 class UnevaluatableError(Exception):
     pass
 
 
 _notimplemented_ops = set(getattr(operators, op)
-                          for op in ('like_op', 'notlike_op', 'ilike_op',
-                                     'notilike_op', 'between_op', 'in_op',
-                                     'notin_op', 'endswith_op', 'concat_op'))
+                      for op in ('like_op', 'notlike_op', 'ilike_op',
+                                 'notilike_op', 'between_op', 'in_op',
+                                 'notin_op', 'endswith_op', 'concat_op'))
 
 class EvaluatorCompiler(object):
     def process(self, clause):
         meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
         if not meth:
-            raise UnevaluatableError("Cannot evaluate %s" % type(clause).__name__)
+            raise UnevaluatableError(
+                "Cannot evaluate %s" % type(clause).__name__)
         return meth(clause)
 
     def visit_grouping(self, clause):
                         return False
                 return True
         else:
-            raise UnevaluatableError("Cannot evaluate clauselist with operator %s" % clause.operator)
+            raise UnevaluatableError(
+                "Cannot evaluate clauselist with operator %s" % 
+                clause.operator)
 
         return evaluate
 
     def visit_binary(self, clause):
-        eval_left,eval_right = map(self.process, [clause.left, clause.right])
+        eval_left,eval_right = map(self.process, 
+                                [clause.left, clause.right])
         operator = clause.operator
         if operator is operators.is_:
             def evaluate(obj):
                     return None
                 return operator(eval_left(obj), eval_right(obj))
         else:
-            raise UnevaluatableError("Cannot evaluate %s with operator %s" % (type(clause).__name__, clause.operator))
+            raise UnevaluatableError(
+                    "Cannot evaluate %s with operator %s" % 
+                    (type(clause).__name__, clause.operator))
         return evaluate
 
     def visit_unary(self, clause):
                     return None
                 return not value
             return evaluate
-        raise UnevaluatableError("Cannot evaluate %s with operator %s" % (type(clause).__name__, clause.operator))
+        raise UnevaluatableError(
+                    "Cannot evaluate %s with operator %s" % 
+                    (type(clause).__name__, clause.operator))
 
     def visit_bindparam(self, clause):
         val = clause.value

File lib/sqlalchemy/orm/events.py

 """ORM event interfaces.
 
 """
-from sqlalchemy import event, exc, util
+from .. import event, exc, util
 orm = util.importlater("sqlalchemy", "orm")
 import inspect
 

File lib/sqlalchemy/orm/exc.py

 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 """SQLAlchemy ORM exceptions."""
-
-import sqlalchemy as sa
-orm_util = sa.util.importlater('sqlalchemy.orm', 'util')
+from .. import exc as sa_exc, util
+orm_util = util.importlater('sqlalchemy.orm', 'util')
+attributes = util.importlater('sqlalchemy.orm', 'attributes')
 
 NO_STATE = (AttributeError, KeyError)
 """Exception types that may be raised by instrumentation implementations."""
 
-class StaleDataError(sa.exc.SQLAlchemyError):
+class StaleDataError(sa_exc.SQLAlchemyError):
     """An operation encountered database state that is unaccounted for.
 
     Conditions which cause this to happen include:
 ConcurrentModificationError = StaleDataError
 
 
-class FlushError(sa.exc.SQLAlchemyError):
+class FlushError(sa_exc.SQLAlchemyError):
     """A invalid condition was detected during flush()."""
 
 
-class UnmappedError(sa.exc.InvalidRequestError):
+class UnmappedError(sa_exc.InvalidRequestError):
     """Base for exceptions that involve expected mappings not present."""
 
-class ObjectDereferencedError(sa.exc.SQLAlchemyError):
+class ObjectDereferencedError(sa_exc.SQLAlchemyError):
     """An operation cannot complete due to an object being garbage collected."""
 
-class DetachedInstanceError(sa.exc.SQLAlchemyError):
+class DetachedInstanceError(sa_exc.SQLAlchemyError):
     """An attempt to access unloaded attributes on a 
     mapped instance that is detached."""
 
     def __init__(self, obj, msg=None):
         if not msg:
             try:
-                mapper = sa.orm.class_mapper(type(obj))
+                mapper = orm_util.class_mapper(type(obj))
                 name = _safe_cls_name(type(obj))
                 msg = ("Class %r is mapped, but this instance lacks "
                        "instrumentation.  This occurs when the instance is created "
     def __reduce__(self):
         return self.__class__, (None, self.args[0])
 
-class ObjectDeletedError(sa.exc.InvalidRequestError):
+class ObjectDeletedError(sa_exc.InvalidRequestError):
     """A refresh operation failed to retrieve the database
     row corresponding to an object's known primary key identity.
     
             msg = "Instance '%s' has been deleted, or its "\
              "row is otherwise not present." % orm_util.state_str(state)
 
-        sa.exc.InvalidRequestError.__init__(self, msg)
+        sa_exc.InvalidRequestError.__init__(self, msg)
 
     def __reduce__(self):
         return self.__class__, (None, self.args[0])
 
-class UnmappedColumnError(sa.exc.InvalidRequestError):
+class UnmappedColumnError(sa_exc.InvalidRequestError):
     """Mapping operation was requested on an unknown column."""
 
 
-class NoResultFound(sa.exc.InvalidRequestError):
+class NoResultFound(sa_exc.InvalidRequestError):
     """A database result was required but none was found."""
 
 
-class MultipleResultsFound(sa.exc.InvalidRequestError):
+class MultipleResultsFound(sa_exc.InvalidRequestError):
     """A single database result was required but more than one were found."""
 
 
-# Legacy compat until 0.6.
-sa.exc.ConcurrentModificationError = ConcurrentModificationError
-sa.exc.FlushError = FlushError
-sa.exc.UnmappedColumnError
-
 def _safe_cls_name(cls):
     try:
         cls_name = '.'.join((cls.__module__, cls.__name__))
 
 def _default_unmapped(cls):
     try:
-        mappers = sa.orm.attributes.manager_of_class(cls).mappers
+        mappers = attributes.manager_of_class(cls).mappers
     except NO_STATE:
         mappers = {}
     except TypeError:

File lib/sqlalchemy/orm/identity.py

 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 import weakref
-from sqlalchemy.orm import attributes
+from . import attributes
 
 
 class IdentityMap(dict):

File lib/sqlalchemy/orm/instrumentation.py

 """
 
 
-from sqlalchemy.orm import exc, collections, events
-from operator import attrgetter, itemgetter
-from sqlalchemy import event, util, inspection
+from . import exc, collections, events, state, attributes
+from operator import attrgetter
+from .. import event, util
 import weakref
-from sqlalchemy.orm import state, attributes
 
 
 INSTRUMENTATION_MANAGER = '__sa_instrumentation_manager__'

File lib/sqlalchemy/orm/interfaces.py

 classes within should be considered mostly private.
 
 """
-
+from __future__ import absolute_import
 from itertools import chain
 
-from sqlalchemy import exc as sa_exc
-from sqlalchemy import util
-from sqlalchemy.sql import operators
-deque = __import__('collections').deque
+from .. import exc as sa_exc, util
+from ..sql import operators
+from collections import deque
 
-mapperutil = util.importlater('sqlalchemy.orm', 'util')
-
-collections = None
+orm_util = util.importlater('sqlalchemy.orm', 'util')
+collections = util.importlater('sqlalchemy.orm', 'collections')
 
 __all__ = (
     'AttributeExtension',
 MANYTOONE = util.symbol('MANYTOONE')
 MANYTOMANY = util.symbol('MANYTOMANY')
 
-from deprecated_interfaces import AttributeExtension, SessionExtension, \
+from .deprecated_interfaces import AttributeExtension, SessionExtension, \
     MapperExtension
 
 
         self.__dict__ = state
 
     def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
-        if mapperutil._is_aliased_class(mapper):
+        if orm_util._is_aliased_class(mapper):
             searchfor = mapper
             isa = False
         else:
-            searchfor = mapperutil._class_to_mapper(mapper)
+            searchfor = orm_util._class_to_mapper(mapper)
             isa = True
         for ent in query._mapper_entities:
             if ent.corresponds_to(searchfor):
         Return a list of affected paths.
         
         """
-        path = mapperutil.PathRegistry.root
+        path = orm_util.PathRegistry.root
         entity = None
         paths = []
         no_result = []
 
             if getattr(token, '_of_type', None):
                 ac = token._of_type
-                ext_info = mapperutil._extended_entity_info(ac)
+                ext_info = orm_util._extended_entity_info(ac)
                 path_element = mapper = ext_info.mapper
                 if not ext_info.is_aliased_class:
-                    ac = mapperutil.with_polymorphic(
+                    ac = orm_util.with_polymorphic(
                                 ext_info.mapper.base_mapper, 
                                 ext_info.mapper, aliased=True)
-                    ext_info = mapperutil._extended_entity_info(ac)
+                    ext_info = orm_util._extended_entity_info(ac)
                 path.set(query, "path_with_polymorphic", ext_info)
             else:
                 path_element = mapper = getattr(prop, 'mapper', None)
         delattr(class_, key)
 
     def instrument_collection_class(self, class_, key, collection_class):
-        global collections
-        if collections is None:
-            from sqlalchemy.orm import collections
         return collections.prepare_instrumentation(collection_class)
 
     def get_instance_dict(self, class_, instance):

File lib/sqlalchemy/orm/loading.py

+# orm/loading.py
+# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""private module containing functions used to convert database
+rows into object instances and associated state.
+
+the functions here are called primarily by Query, Mapper, 
+as well as some of the attribute loading strategies.
+
+"""
+from __future__ import absolute_import
+
+from .. import util
+from . import attributes, exc as orm_exc
+from .interfaces import EXT_CONTINUE
+from ..sql import util as sql_util
+from .util import _none_set, state_str
+
+_new_runid = util.counter()
+
+def instances(query, cursor, context):
+    """Return an ORM result as an iterator."""
+    session = query.session
+
+    context.runid = _new_runid()
+
+    filter_fns = [ent.filter_fn
+                for ent in query._entities]
+    filtered = id in filter_fns
+
+    single_entity = filtered and len(query._entities) == 1
+
+    if filtered:
+        if single_entity:
+            filter_fn = id
+        else:
+            def filter_fn(row):
+                return tuple(fn(x) for x, fn in zip(row, filter_fns))
+
+    custom_rows = single_entity and \
+                    query._entities[0].mapper.dispatch.append_result
+
+    (process, labels) = \
+                zip(*[
+                    query_entity.row_processor(query, 
+                            context, custom_rows)
+                    for query_entity in query._entities
+                ])
+
+    while True:
+        context.progress = {}
+        context.partials = {}
+
+        if query._yield_per:
+            fetch = cursor.fetchmany(query._yield_per)
+            if not fetch:
+                break
+        else:
+            fetch = cursor.fetchall()
+
+        if custom_rows:
+            rows = []
+            for row in fetch:
+                process[0](row, rows)
+        elif single_entity:
+            rows = [process[0](row, None) for row in fetch]
+        else:
+            rows = [util.NamedTuple([proc(row, None) for proc in process],
+                                    labels) for row in fetch]
+
+        if filtered:
+            rows = util.unique_list(rows, filter_fn)
+
+        if context.refresh_state and query._only_load_props \
+                    and context.refresh_state in context.progress:
+            context.refresh_state.commit(
+                    context.refresh_state.dict, query._only_load_props)
+            context.progress.pop(context.refresh_state)
+
+        session._finalize_loaded(context.progress)
+
+        for ii, (dict_, attrs) in context.partials.iteritems():
+            ii.commit(dict_, attrs)
+
+        for row in rows:
+            yield row
+
+        if not query._yield_per:
+            break
+
+def merge_result(query, iterator, load=True):
+    """Merge a result into this :class:`.Query` object's Session."""
+
+    from . import query as querylib
+
+    session = query.session
+    if load:
+        # flush current contents if we expect to load data
+        session._autoflush()
+
+    autoflush = session.autoflush
+    try:
+        session.autoflush = False
+        single_entity = len(query._entities) == 1
+        if single_entity:
+            if isinstance(query._entities[0], querylib._MapperEntity):
+                result = [session._merge(
+                        attributes.instance_state(instance), 
+                        attributes.instance_dict(instance), 
+                        load=load, _recursive={})
+                        for instance in iterator]
+            else:
+                result = list(iterator)
+        else:
+            mapped_entities = [i for i, e in enumerate(query._entities) 
+                                    if isinstance(e, querylib._MapperEntity)]
+            result = []
+            for row in iterator:
+                newrow = list(row)
+                for i in mapped_entities:
+                    newrow[i] = session._merge(
+                            attributes.instance_state(newrow[i]), 
+                            attributes.instance_dict(newrow[i]), 
+                            load=load, _recursive={})
+                result.append(util.NamedTuple(newrow, row._labels))
+
+        return iter(result)
+    finally:
+        session.autoflush = autoflush
+
+def get_from_identity(session, key, passive):
+    """Look up the given key in the given session's identity map, 
+    check the object for expired state if found.
+
+    """
+    instance = session.identity_map.get(key)
+    if instance is not None:
+
+        state = attributes.instance_state(instance)
+
+        # expired - ensure it still exists
+        if state.expired:
+            if not passive & attributes.SQL_OK:
+                # TODO: no coverage here
+                return attributes.PASSIVE_NO_RESULT
+            elif not passive & attributes.RELATED_OBJECT_OK:
+                # this mode is used within a flush and the instance's
+                # expired state will be checked soon enough, if necessary
+                return instance
+            try:
+                state(passive)
+            except orm_exc.ObjectDeletedError:
+                session._remove_newly_deleted([state])
+                return None
+        return instance
+    else:
+        return None
+
+def load_on_ident(query, key, 
+                    refresh_state=None, lockmode=None,
+                        only_load_props=None):
+    """Load the given identity key from the database."""
+
+    lockmode = lockmode or query._lockmode
+
+    if key is not None:
+        ident = key[1]
+    else:
+        ident = None
+
+    if refresh_state is None:
+        q = query._clone()
+        q._get_condition()
+    else:
+        q = query._clone()
+
+    if ident is not None:
+        mapper = query._mapper_zero()
+
+        (_get_clause, _get_params) = mapper._get_clause
+
+        # None present in ident - turn those comparisons
+        # into "IS NULL"
+        if None in ident:
+            nones = set([
+                        _get_params[col].key for col, value in
+                         zip(mapper.primary_key, ident) if value is None
+                        ])
+            _get_clause = sql_util.adapt_criterion_to_null(
+                                            _get_clause, nones)
+
+        _get_clause = q._adapt_clause(_get_clause, True, False)
+        q._criterion = _get_clause
+
+        params = dict([
+            (_get_params[primary_key].key, id_val)
+            for id_val, primary_key in zip(ident, mapper.primary_key)
+        ])
+
+        q._params = params
+
+    if lockmode is not None:
+        q._lockmode = lockmode
+    q._get_options(
+        populate_existing=bool(refresh_state),
+        version_check=(lockmode is not None),
+        only_load_props=only_load_props,
+        refresh_state=refresh_state)
+    q._order_by = None
+
+    try:
+        return q.one()
+    except orm_exc.NoResultFound:
+        return None
+
+def instance_processor(mapper, context, path, adapter, 
+                            polymorphic_from=None, 
+                            only_load_props=None, 
+                            refresh_state=None,
+                            polymorphic_discriminator=None):
+
+    """Produce a mapper level row processor callable 
+       which processes rows into mapped instances."""
+
+    # note that this method, most of which exists in a closure
+    # called _instance(), resists being broken out, as 
+    # attempts to do so tend to add significant function
+    # call overhead.  _instance() is the most
+    # performance-critical section in the whole ORM.
+
+    pk_cols = mapper.primary_key
+
+    if polymorphic_from or refresh_state:
+        polymorphic_on = None
+    else:
+        if polymorphic_discriminator is not None:
+            polymorphic_on = polymorphic_discriminator
+        else:
+            polymorphic_on = mapper.polymorphic_on
+        polymorphic_instances = util.PopulateDict(
+                                    _configure_subclass_mapper(
+                                            mapper,
+                                            context, path, adapter)
+                                    )
+
+    version_id_col = mapper.version_id_col
+
+    if adapter:
+        pk_cols = [adapter.columns[c] for c in pk_cols]
+        if polymorphic_on is not None:
+            polymorphic_on = adapter.columns[polymorphic_on]
+        if version_id_col is not None:
+            version_id_col = adapter.columns[version_id_col]
+
+    identity_class = mapper._identity_class
+
+    new_populators = []
+    existing_populators = []
+    eager_populators = []
+    load_path = context.query._current_path + path \
+                if context.query._current_path.path \
+                else path
+
+    def populate_state(state, dict_, row, isnew, only_load_props):
+        if isnew:
+            if context.propagate_options:
+                state.load_options = context.propagate_options
+            if state.load_options:
+                state.load_path = load_path
+
+        if not new_populators:
+            _populators(mapper, context, path, row, adapter,
+                            new_populators,
+                            existing_populators,
+                            eager_populators
+            )
+
+        if isnew:
+            populators = new_populators
+        else:
+            populators = existing_populators
+
+        if only_load_props is None:
+            for key, populator in populators:
+                populator(state, dict_, row)
+        elif only_load_props:
+            for key, populator in populators:
+                if key in only_load_props:
+                    populator(state, dict_, row)
+
+    session_identity_map = context.session.identity_map
+
+    listeners = mapper.dispatch
+
+    translate_row = listeners.translate_row or None
+    create_instance = listeners.create_instance or None
+    populate_instance = listeners.populate_instance or None
+    append_result = listeners.append_result or None
+    populate_existing = context.populate_existing or mapper.always_refresh
+    invoke_all_eagers = context.invoke_all_eagers
+
+    if mapper.allow_partial_pks:
+        is_not_primary_key = _none_set.issuperset
+    else:
+        is_not_primary_key = _none_set.issubset
+
+    def _instance(row, result):
+        if not new_populators and invoke_all_eagers:
+            _populators(mapper, context, path, row, adapter,
+                            new_populators,
+                            existing_populators,
+                            eager_populators
+            )
+
+        if translate_row:
+            for fn in translate_row:
+                ret = fn(mapper, context, row)
+                if ret is not EXT_CONTINUE:
+                    row = ret
+                    break
+
+        if polymorphic_on is not None:
+            discriminator = row[polymorphic_on]
+            if discriminator is not None:
+                _instance = polymorphic_instances[discriminator]
+                if _instance:
+                    return _instance(row, result)
+
+        # determine identity key
+        if refresh_state:
+            identitykey = refresh_state.key
+            if identitykey is None:
+                # super-rare condition; a refresh is being called
+                # on a non-instance-key instance; this is meant to only
+                # occur within a flush()
+                identitykey = mapper._identity_key_from_state(refresh_state)
+        else:
+            identitykey = (
+                            identity_class, 
+                            tuple([row[column] for column in pk_cols])
+                        )
+
+        instance = session_identity_map.get(identitykey)
+        if instance is not None:
+            state = attributes.instance_state(instance)
+            dict_ = attributes.instance_dict(instance)
+
+            isnew = state.runid != context.runid
+            currentload = not isnew
+            loaded_instance = False
+
+            if not currentload and \
+                    version_id_col is not None and \
+                    context.version_check and \
+                    mapper._get_state_attr_by_column(
+                            state, 
+                            dict_, 
+                            mapper.version_id_col) != \
+                                    row[version_id_col]:
+
+                raise orm_exc.StaleDataError(
+                        "Instance '%s' has version id '%s' which "
+                        "does not match database-loaded version id '%s'." 
+                        % (state_str(state), 
+                            mapper._get_state_attr_by_column(
+                                        state, dict_,
+                                        mapper.version_id_col),
+                                row[version_id_col]))
+        elif refresh_state:
+            # out-of-band refresh_state detected (i.e. it's not in the
+            # session.identity_map); honor it anyway.  This can happen
+            # if a _get() occurs within save_obj(), such as
+            # when eager_defaults is True.
+            state = refresh_state
+            instance = state.obj()
+            dict_ = attributes.instance_dict(instance)
+            isnew = state.runid != context.runid
+            currentload = True
+            loaded_instance = False
+        else:
+            # check for non-NULL values in the primary key columns,
+            # else no entity is returned for the row
+            if is_not_primary_key(identitykey[1]):
+                return None
+
+            isnew = True
+            currentload = True
+            loaded_instance = True
+
+            if create_instance:
+                for fn in create_instance:
+                    instance = fn(mapper, context, 
+                                        row, mapper.class_)
+                    if instance is not EXT_CONTINUE:
+                        manager = attributes.manager_of_class(
+                                                instance.__class__)
+                        # TODO: if manager is None, raise a friendly error
+                        # about returning instances of unmapped types
+                        manager.setup_instance(instance)
+                        break
+                else:
+                    instance = mapper.class_manager.new_instance()
+            else:
+                instance = mapper.class_manager.new_instance()
+
+            dict_ = attributes.instance_dict(instance)
+            state = attributes.instance_state(instance)
+            state.key = identitykey
+
+            # attach instance to session.
+            state.session_id = context.session.hash_key
+            session_identity_map.add(state)
+
+        if currentload or populate_existing:
+            # state is being fully loaded, so populate.
+            # add to the "context.progress" collection.
+            if isnew:
+                state.runid = context.runid
+                context.progress[state] = dict_
+
+            if populate_instance:
+                for fn in populate_instance:
+                    ret = fn(mapper, context, row, state, 
+                        only_load_props=only_load_props, 
+                        instancekey=identitykey, isnew=isnew)
+                    if ret is not EXT_CONTINUE:
+                        break
+                else:
+                    populate_state(state, dict_, row, isnew, only_load_props)
+            else:
+                populate_state(state, dict_, row, isnew, only_load_props)
+
+            if loaded_instance:
+                state.manager.dispatch.load(state, context)
+            elif isnew:
+                state.manager.dispatch.refresh(state, context, only_load_props)
+
+        elif state in context.partials or state.unloaded or eager_populators:
+            # state is having a partial set of its attributes
+            # refreshed.  Populate those attributes,
+            # and add to the "context.partials" collection.
+            if state in context.partials:
+                isnew = False
+                (d_, attrs) = context.partials[state]
+            else:
+                isnew = True
+                attrs = state.unloaded
+                context.partials[state] = (dict_, attrs)
+
+            if populate_instance:
+                for fn in populate_instance:
+                    ret = fn(mapper, context, row, state, 
+                        only_load_props=attrs, 
+                        instancekey=identitykey, isnew=isnew)
+                    if ret is not EXT_CONTINUE:
+                        break
+                else:
+                    populate_state(state, dict_, row, isnew, attrs)
+            else:
+                populate_state(state, dict_, row, isnew, attrs)
+
+            for key, pop in eager_populators:
+                if key not in state.unloaded:
+                    pop(state, dict_, row)
+
+            if isnew:
+                state.manager.dispatch.refresh(state, context, attrs)
+
+
+        if result is not None:
+            if append_result:
+                for fn in append_result:
+                    if fn(mapper, context, row, state, 
+                                result, instancekey=identitykey,
+                                isnew=isnew) is not EXT_CONTINUE:
+                        break
+                else:
+                    result.append(instance)
+            else:
+                result.append(instance)
+
+        return instance
+    return _instance
+
+def _populators(mapper, context, path, row, adapter,
+        new_populators, existing_populators, eager_populators):
+    """Produce a collection of attribute-level row processor
+    callables."""
+
+    delayed_populators = []
+    pops = (new_populators, existing_populators, delayed_populators, 
+                        eager_populators)
+    for prop in mapper._props.itervalues():
+        for i, pop in enumerate(prop.create_row_processor(
+                                    context, path,
+                                    mapper, row, adapter)):
+            if pop is not None:
+                pops[i].append((prop.key, pop))
+
+    if delayed_populators:
+        new_populators.extend(delayed_populators)
+
+def _configure_subclass_mapper(mapper, context, path, adapter):
+    """Produce a mapper-level row processor callable factory for mappers
+    inheriting this one."""
+
+    def configure_subclass_mapper(discriminator):
+        try:
+            sub_mapper = mapper.polymorphic_map[discriminator]
+        except KeyError:
+            raise AssertionError(
+                    "No such polymorphic_identity %r is defined" %
+                    discriminator)
+        if sub_mapper is mapper:
+            return None
+
+        # replace the tip of the path info with the subclass mapper
+        # being used, so that accurate "load_path" info is available
+        # for options invoked during deferred loads, e.g.
+        # query(Person).options(defer(Engineer.machines, Machine.name)).
+        # For AliasedClass paths, disregard this step (new in 0.8).
+        return instance_processor(
+                            sub_mapper,
+                            context, 
+                            path.parent[sub_mapper] 
+                                if not path.is_aliased_class 
+                                else path, 
+                            adapter,
+                            polymorphic_from=mapper)
+    return configure_subclass_mapper

File lib/sqlalchemy/orm/mapper.py

 available in :class:`~sqlalchemy.orm.`.
 
 """
-
+from __future__ import absolute_import
 import types
 import weakref
 import operator
 from itertools import chain, groupby
-deque = __import__('collections').deque
+from collections import deque
 
-from sqlalchemy import sql, util, log, exc as sa_exc, event, schema
-from sqlalchemy.sql import expression, visitors, operators, util as sqlutil
-from sqlalchemy.orm import instrumentation, attributes, sync, \
-                        exc as orm_exc, unitofwork, events
-from sqlalchemy.orm.interfaces import MapperProperty, EXT_CONTINUE, \
+from .. import sql, util, log, exc as sa_exc, event, schema
+from ..sql import expression, visitors, operators, util as sql_util
+from . import instrumentation, attributes, sync, \
+                        exc as orm_exc, unitofwork, events, loading
+from .interfaces import MapperProperty, EXT_CONTINUE, \
                                 PropComparator
 
-from sqlalchemy.orm.util import _INSTRUMENTOR, _class_to_mapper, \
+from .util import _INSTRUMENTOR, _class_to_mapper, \
      _state_mapper, class_mapper, instance_str, state_str,\
-     PathRegistry
-
+     PathRegistry, _none_set
 import sys
 sessionlib = util.importlater("sqlalchemy.orm", "session")
 properties = util.importlater("sqlalchemy.orm", "properties")
 _mapper_registry = weakref.WeakKeyDictionary()
 _new_mappers = False
 _already_compiling = False
-_none_set = frozenset([None])
 
 _memoized_configured_property = util.group_expirable_memoized_property()
 
                         # immediate table of the inherited mapper, not its
                         # full table which could pull in other stuff we dont
                         # want (allows test/inheritance.InheritTest4 to pass)
-                        self.inherit_condition = sqlutil.join_condition(
+                        self.inherit_condition = sql_util.join_condition(
                                                     self.inherits.local_table,
                                                     self.local_table)
                     self.mapped_table = sql.join(
                                                 self.inherit_condition)
 
                     fks = util.to_set(self.inherit_foreign_keys)
-                    self._inherits_equated_pairs = sqlutil.criterion_as_pairs(
+                    self._inherits_equated_pairs = sql_util.criterion_as_pairs(
                                                 self.mapped_table.onclause,
                                                 consider_as_foreign_keys=fks)
             else:
 
     def _configure_pks(self):
 
-        self.tables = sqlutil.find_tables(self.mapped_table)
+        self.tables = sql_util.find_tables(self.mapped_table)
 
         self._pks_by_table = {}
         self._cols_by_table = {}
             # determine primary key from argument or mapped_table pks - 
             # reduce to the minimal set of columns
             if self._primary_key_argument:
-                primary_key = sqlutil.reduce_columns(
+                primary_key = sql_util.reduce_columns(
                     [self.mapped_table.corresponding_column(c) for c in
                     self._primary_key_argument],
                     ignore_nonexistent_tables=True)
             else:
-                primary_key = sqlutil.reduce_columns(
+                primary_key = sql_util.reduce_columns(
                                 self._pks_by_table[self.mapped_table],
                                 ignore_nonexistent_tables=True)
 
             mappers = []
 
         if selectable is not None:
-            tables = set(sqlutil.find_tables(selectable,
+            tables = set(sql_util.find_tables(selectable,
                             include_aliases=True))
             mappers = [m for m in mappers if m.local_table in tables]
 
         if self.inherits and not self.concrete:
             statement = self._optimized_get_statement(state, attribute_names)
             if statement is not None:
-                result = session.query(self).from_statement(statement).\
-                                        _load_on_ident(None, 
-                                            only_load_props=attribute_names, 
-                                            refresh_state=state)
+                result = loading.load_on_ident(
+                            session.query(self).from_statement(statement),
+                                None, 
+                                only_load_props=attribute_names, 
+                                refresh_state=state
+                            )
 
         if result is False:
             if has_key:
                             % state_str(state))
                 return
 
-            result = session.query(self)._load_on_ident(
-                                                identity_key, 
-                                                refresh_state=state, 
-                                                only_load_props=attribute_names)
+            result = loading.load_on_ident(
+                        session.query(self),
+                                    identity_key, 
+                                    refresh_state=state, 
+                                    only_load_props=attribute_names)
 
         # if instance is pending, a refresh operation 
         # may not complete (even if PK attributes are assigned)
         props = self._props
 
         tables = set(chain(
-                        *[sqlutil.find_tables(c, check_columns=True) 
+                        *[sql_util.find_tables(c, check_columns=True) 
                         for key in attribute_names
                         for c in props[key].columns]
                     ))
             for t in mapper.tables:
                 table_to_mapper[t] = mapper
 
-        sorted_ = sqlutil.sort_tables(table_to_mapper.iterkeys())
+        sorted_ = sql_util.sort_tables(table_to_mapper.iterkeys())
         ret = util.OrderedDict()
         for t in sorted_:
             ret[t] = table_to_mapper[t]
 
         return result
 
-
-    def _instance_processor(self, context, path, adapter, 
-                                polymorphic_from=None, 
-                                only_load_props=None, refresh_state=None,
-                                polymorphic_discriminator=None):
-
-        """Produce a mapper level row processor callable 
-           which processes rows into mapped instances."""
-
-        # note that this method, most of which exists in a closure
-        # called _instance(), resists being broken out, as 
-        # attempts to do so tend to add significant function
-        # call overhead.  _instance() is the most
-        # performance-critical section in the whole ORM.
-
-        pk_cols = self.primary_key
-
-        if polymorphic_from or refresh_state:
-            polymorphic_on = None
-        else:
-            if polymorphic_discriminator is not None:
-                polymorphic_on = polymorphic_discriminator
-            else:
-                polymorphic_on = self.polymorphic_on
-            polymorphic_instances = util.PopulateDict(
-                                        self._configure_subclass_mapper(
-                                                context, path, adapter)
-                                        )
-
-        version_id_col = self.version_id_col
-
-        if adapter:
-            pk_cols = [adapter.columns[c] for c in pk_cols]
-            if polymorphic_on is not None:
-                polymorphic_on = adapter.columns[polymorphic_on]
-            if version_id_col is not None:
-                version_id_col = adapter.columns[version_id_col]
-
-        identity_class = self._identity_class
-
-        new_populators = []
-        existing_populators = []
-        eager_populators = []
-        load_path = context.query._current_path + path \
-                    if context.query._current_path.path \
-                    else path
-
-        def populate_state(state, dict_, row, isnew, only_load_props):
-            if isnew:
-                if context.propagate_options:
-                    state.load_options = context.propagate_options
-                if state.load_options:
-                    state.load_path = load_path
-
-            if not new_populators:
-                self._populators(context, path, row, adapter,
-                                new_populators,
-                                existing_populators,
-                                eager_populators
-                )
-
-            if isnew:
-                populators = new_populators
-            else:
-                populators = existing_populators
-
-            if only_load_props is None:
-                for key, populator in populators:
-                    populator(state, dict_, row)
-            elif only_load_props:
-                for key, populator in populators:
-                    if key in only_load_props:
-                        populator(state, dict_, row)
-
-        session_identity_map = context.session.identity_map
-
-        listeners = self.dispatch
-
-        translate_row = listeners.translate_row or None
-        create_instance = listeners.create_instance or None
-        populate_instance = listeners.populate_instance or None
-        append_result = listeners.append_result or None
-        populate_existing = context.populate_existing or self.always_refresh
-        invoke_all_eagers = context.invoke_all_eagers
-
-        if self.allow_partial_pks:
-            is_not_primary_key = _none_set.issuperset
-        else:
-            is_not_primary_key = _none_set.issubset
-
-        def _instance(row, result):
-            if not new_populators and invoke_all_eagers:
-                self._populators(context, path, row, adapter,
-                                new_populators,
-                                existing_populators,
-                                eager_populators
-                )
-
-            if translate_row:
-                for fn in translate_row:
-                    ret = fn(self, context, row)
-                    if ret is not EXT_CONTINUE:
-                        row = ret
-                        break
-
-            if polymorphic_on is not None:
-                discriminator = row[polymorphic_on]
-                if discriminator is not None:
-                    _instance = polymorphic_instances[discriminator]
-                    if _instance:
-                        return _instance(row, result)
-
-            # determine identity key
-            if refresh_state:
-                identitykey = refresh_state.key
-                if identitykey is None:
-                    # super-rare condition; a refresh is being called
-                    # on a non-instance-key instance; this is meant to only
-                    # occur within a flush()
-                    identitykey = self._identity_key_from_state(refresh_state)
-            else:
-                identitykey = (
-                                identity_class, 
-                                tuple([row[column] for column in pk_cols])
-                            )
-
-            instance = session_identity_map.get(identitykey)
-            if instance is not None:
-                state = attributes.instance_state(instance)
-                dict_ = attributes.instance_dict(instance)
-
-                isnew = state.runid != context.runid
-                currentload = not isnew
-                loaded_instance = False
-
-                if not currentload and \
-                        version_id_col is not None and \
-                        context.version_check and \
-                        self._get_state_attr_by_column(
-                                state, 
-                                dict_, 
-                                self.version_id_col) != \
-                                        row[version_id_col]:
-
-                    raise orm_exc.StaleDataError(
-                            "Instance '%s' has version id '%s' which "
-                            "does not match database-loaded version id '%s'." 
-                            % (state_str(state), 
-                                self._get_state_attr_by_column(
-                                            state, dict_,
-                                            self.version_id_col),
-                                    row[version_id_col]))
-            elif refresh_state:
-                # out of band refresh_state detected (i.e. its not in the
-                # session.identity_map) honor it anyway.  this can happen 
-                # if a _get() occurs within save_obj(), such as
-                # when eager_defaults is True.
-                state = refresh_state
-                instance = state.obj()
-                dict_ = attributes.instance_dict(instance)
-                isnew = state.runid != context.runid
-                currentload = True
-                loaded_instance = False
-            else:
-                # check for non-NULL values in the primary key columns,
-                # else no entity is returned for the row
-                if is_not_primary_key(identitykey[1]):
-                    return None
-
-                isnew = True
-                currentload = True
-                loaded_instance = True
-
-                if create_instance:
-                    for fn in create_instance:
-                        instance = fn(self, context, 
-                                            row, self.class_)
-                        if instance is not EXT_CONTINUE:
-                            manager = attributes.manager_of_class(
-                                                    instance.__class__)
-                            # TODO: if manager is None, raise a friendly error
-                            # about returning instances of unmapped types
-                            manager.setup_instance(instance)
-                            break
-                    else:
-                        instance = self.class_manager.new_instance()
-                else:
-                    instance = self.class_manager.new_instance()
-
-                dict_ = attributes.instance_dict(instance)
-                state = attributes.instance_state(instance)
-                state.key = identitykey
-
-                # attach instance to session.
-                state.session_id = context.session.hash_key
-                session_identity_map.add(state)
-
-            if currentload or populate_existing:
-                # state is being fully loaded, so populate.
-                # add to the "context.progress" collection.
-                if isnew:
-                    state.runid = context.runid
-                    context.progress[state] = dict_
-
-                if populate_instance:
-                    for fn in populate_instance:
-                        ret = fn(self, context, row, state, 
-                            only_load_props=only_load_props, 
-                            instancekey=identitykey, isnew=isnew)
-                        if ret is not EXT_CONTINUE:
-                            break
-                    else:
-                        populate_state(state, dict_, row, isnew, only_load_props)
-                else:
-                    populate_state(state, dict_, row, isnew, only_load_props)
-
-                if loaded_instance:
-                    state.manager.dispatch.load(state, context)
-                elif isnew:
-                    state.manager.dispatch.refresh(state, context, only_load_props)
-
-            elif state in context.partials or state.unloaded or eager_populators:
-                # state is having a partial set of its attributes
-                # refreshed.  Populate those attributes,
-                # and add to the "context.partials" collection.
-                if state in context.partials:
-                    isnew = False
-                    (d_, attrs) = context.partials[state]
-                else:
-                    isnew = True
-                    attrs = state.unloaded
-                    context.partials[state] = (dict_, attrs)
-
-                if populate_instance:
-                    for fn in populate_instance:
-                        ret = fn(self, context, row, state, 
-                            only_load_props=attrs, 
-                            instancekey=identitykey, isnew=isnew)
-                        if ret is not EXT_CONTINUE:
-                            break
-                    else:
-                        populate_state(state, dict_, row, isnew, attrs)
-                else:
-                    populate_state(state, dict_, row, isnew, attrs)
-
-                for key, pop in eager_populators:
-                    if key not in state.unloaded:
-                        pop(state, dict_, row)
-
-                if isnew:
-                    state.manager.dispatch.refresh(state, context, attrs)
-
-
-            if result is not None:
-                if append_result:
-                    for fn in append_result:
-                        if fn(self, context, row, state, 
-                                    result, instancekey=identitykey,
-                                    isnew=isnew) is not EXT_CONTINUE:
-                            break
-                    else:
-                        result.append(instance)
-                else:
-                    result.append(instance)
-
-            return instance
-        return _instance
-
-    def _populators(self, context, path, row, adapter,
-            new_populators, existing_populators, eager_populators):
-        """Produce a collection of attribute level row processor 
-        callables."""
-
-        delayed_populators = []
-        pops = (new_populators, existing_populators, delayed_populators, 
-                            eager_populators)
-        for prop in self._props.itervalues():
-            for i, pop in enumerate(prop.create_row_processor(
-                                        context, path,
-                                        self, row, adapter)):
-                if pop is not None:
-                    pops[i].append((prop.key, pop))
-
-        if delayed_populators:
-            new_populators.extend(delayed_populators)
-
-    def _configure_subclass_mapper(self, context, path, adapter):
-        """Produce a mapper level row processor callable factory for mappers
-        inheriting this one."""
-
-        def configure_subclass_mapper(discriminator):
-            try:
-                mapper = self.polymorphic_map[discriminator]
-            except KeyError:
-                raise AssertionError(
-                        "No such polymorphic_identity %r is defined" %
-                        discriminator)
-            if mapper is self:
-                return None
-
-            # replace the tip of the path info with the subclass mapper 
-            # being used, that way accurate "load_path" info is available 
-            # for options invoked during deferred loads, e.g.
-            # query(Person).options(defer(Engineer.machines, Machine.name)).
-            # for AliasedClass paths, disregard this step (new in 0.8).
-            return mapper._instance_processor(
-                                context, 
-                                path.parent[mapper] 
-                                    if not path.is_aliased_class 
-                                    else path, 
-                                adapter,
-                                polymorphic_from=self)
-        return configure_subclass_mapper
-
 log.class_logger(Mapper)
 
 def configure_mappers():

File lib/sqlalchemy/orm/persistence.py

 
 import operator
 from itertools import groupby
-
-from sqlalchemy import sql, util, exc as sa_exc, schema
-from sqlalchemy.orm import attributes, sync, \
-                        exc as orm_exc,\
-                        evaluator
-
-from sqlalchemy.orm.util import _state_mapper, state_str, _attr_as_key
-from sqlalchemy.sql import expression
+from .. import sql, util, exc as sa_exc, schema
+from . import attributes, sync, exc as orm_exc, evaluator
+from .util import _state_mapper, state_str, _attr_as_key
+from ..sql import expression
 
 def save_obj(base_mapper, states, uowtransaction, single=False):
     """Issue ``INSERT`` and/or ``UPDATE`` statements for a list 
                     params[col._label] = value
         if hasdata:
             if hasnull:
-                raise sa_exc.FlushError(
+                raise orm_exc.FlushError(
                             "Can't update table "
                             "using NULL for primary "
                             "key value")
                     mapper._get_state_attr_by_column(
                                     state, state_dict, col)
             if value is None:
-                raise sa_exc.FlushError(
+                raise orm_exc.FlushError(
                             "Can't delete from table "
                             "using NULL for primary "
                             "key value")
         # refresh whatever has been expired.
         if base_mapper.eager_defaults and state.unloaded:
             state.key = base_mapper._identity_key_from_state(state)
-            uowtransaction.session.query(base_mapper)._load_on_ident(
+            from . import loading
+            loading.load_on_ident(
+                uowtransaction.session.query(base_mapper),
                 state.key, refresh_state=state,
                 only_load_props=state.unloaded)
 

File lib/sqlalchemy/orm/properties.py

 
 """
 
-from sqlalchemy import sql, util, log, exc as sa_exc
-from sqlalchemy.sql.util import ClauseAdapter, criterion_as_pairs, \
-    join_condition, _shallow_annotate
-from sqlalchemy.sql import operators, expression, visitors
-from sqlalchemy.orm import attributes, dependency, mapper, \
-    object_mapper, strategies, configure_mappers, relationships
-from sqlalchemy.orm.util import CascadeOptions, _class_to_mapper, \
-    _orm_annotate, _orm_deannotate, _orm_full_deannotate,\
-    _entity_info
+from .. import sql, util, log, exc as sa_exc
+from ..sql import operators, expression
+from . import (
+    attributes, dependency, mapper, 
+    strategies, configure_mappers, relationships
+    )
+from .util import (
+    CascadeOptions, \
+        _orm_annotate, _orm_deannotate, _orm_full_deannotate,
+        _entity_info
+    )
 
-from sqlalchemy.orm.interfaces import MANYTOMANY, MANYTOONE, \
-    MapperProperty, ONETOMANY, PropComparator, StrategizedProperty
+from .interfaces import (
+    MANYTOMANY, MANYTOONE, MapperProperty, ONETOMANY, 
+    PropComparator, StrategizedProperty
+    )
 mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
 NoneType = type(None)
 
+from descriptor_props import CompositeProperty, SynonymProperty, \
+            ComparableProperty,ConcreteInheritedProperty
+
 __all__ = ('ColumnProperty', 'CompositeProperty', 'SynonymProperty',
            'ComparableProperty', 'RelationshipProperty', 'RelationProperty')
 
-from descriptor_props import CompositeProperty, SynonymProperty, \
-            ComparableProperty,ConcreteInheritedProperty
 
 class ColumnProperty(StrategizedProperty):
     """Describes an object attribute that corresponds to a table column.

File lib/sqlalchemy/orm/query.py

 """
 
 from itertools import chain
-from operator import itemgetter
-
-from sqlalchemy import sql, util, log, schema
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.orm import persistence
-from sqlalchemy.sql import util as sql_util
-from sqlalchemy.sql import expression, visitors, operators
-from sqlalchemy.orm import (
-    attributes, interfaces, mapper, object_mapper, evaluator,
+
+from . import (
+    attributes, interfaces, object_mapper, persistence, 
+    exc as orm_exc, loading
     )
-from sqlalchemy.orm.util import (
+from .util import (
     AliasedClass, ORMAdapter, _entity_descriptor, _entity_info,
     _extended_entity_info, PathRegistry,
     _is_aliased_class, _is_mapped_class, _orm_columns, _orm_selectable,
-    join as orm_join,with_parent, _attr_as_key, aliased
+    join as orm_join,with_parent, aliased
     )
-
+from .. import sql, util, log, exc as sa_exc
+from ..sql import (
+        util as sql_util,
+        expression, visitors
+    )
 
 __all__ = ['Query', 'QueryContext', 'aliased']
 
     criteria and options associated with it.
 
     :class:`.Query` objects are normally initially generated using the 
-    :meth:`~.Session.query` method of :class:`.Session`.  For a full walkthrough 
-    of :class:`.Query` usage, see the :ref:`ormtutorial_toplevel`.
+    :meth:`~.Session.query` method of :class:`.Session`.  For a full 
+    walkthrough of :class:`.Query` usage, see the 
+    :ref:`ormtutorial_toplevel`.
 
     """
 
                         ext_info.mapper.with_polymorphic:
                         if ext_info.mapper.mapped_table not in \
                                             self._polymorphic_adapters:
-                            self._mapper_loads_polymorphically_with(ext_info.mapper, 
+                            self._mapper_loads_polymorphically_with(
+                                ext_info.mapper, 
                                 sql_util.ColumnAdapter(
-                                            ext_info.selectable, 
-                                            ext_info.mapper._equivalent_columns))
+                                        ext_info.selectable, 
+                                        ext_info.mapper._equivalent_columns
+                                )
+                            )
                         aliased_adapter = None
                     elif ext_info.is_aliased_class:
                         aliased_adapter = sql_util.ColumnAdapter(
-                                            ext_info.selectable, 
-                                            ext_info.mapper._equivalent_columns)
+                                    ext_info.selectable, 
+                                    ext_info.mapper._equivalent_columns
+                                    )
                     else:
                         aliased_adapter = None
 
         self._orm_only_adapt = False
 
     def _adapt_clause(self, clause, as_filter, orm_only):
-        """Adapt incoming clauses to transformations which have been applied 
-        within this query."""
+        """Adapt incoming clauses to transformations which 
+        have been applied within this query."""
 
         adapters = []
 
         if len(self._entities) > 1:
             raise sa_exc.InvalidRequestError(
                     rationale or 
-                    "This operation requires a Query against a single mapper."
+                    "This operation requires a Query "
+                    "against a single mapper."
                 )
         return self._mapper_zero()
 
         if len(self._entities) > 1:
             raise sa_exc.InvalidRequestError(
                     rationale or 
-                    "This operation requires a Query against a single mapper."
+                    "This operation requires a Query "
+                    "against a single mapper."
                 )
         return self._entity_zero()
 
                 "Query.%s() being called on a Query which already has LIMIT "
                 "or OFFSET applied. To modify the row-limited results of a "
                 " Query, call from_self() first.  "
-                "Otherwise, call %s() before limit() or offset() are applied."
+                "Otherwise, call %s() before limit() or offset() "
+                "are applied."
                 % (meth, meth)
             )
 
         return stmt._annotate({'no_replacement_traverse': True})
 
     def subquery(self, name=None):
-        """return the full SELECT statement represented by this :class:`.Query`, 
-        embedded within an :class:`.Alias`.
+        """return the full SELECT statement represented by 
+        this :class:`.Query`, embedded within an :class:`.Alias`.
 
         Eager JOIN generation within the query is disabled.
 
         return self.enable_eagerloads(False).statement.alias(name=name)
 
     def cte(self, name=None, recursive=False):
-        """Return the full SELECT statement represented by this :class:`.Query`
-        represented as a common table expression (CTE).
+        """Return the full SELECT statement represented by this 
+        :class:`.Query` represented as a common table expression (CTE).
 
         .. versionadded:: 0.7.6
 
         further details.
 
         Here is the `Postgresql WITH 
-        RECURSIVE example <http://www.postgresql.org/docs/8.4/static/queries-with.html>`_.
-        Note that, in this example, the ``included_parts`` cte and the ``incl_alias`` alias
-        of it are Core selectables, which
-        means the columns are accessed via the ``.c.`` attribute.  The ``parts_alias``
-        object is an :func:`.orm.aliased` instance of the ``Part`` entity, so column-mapped
-        attributes are available directly::
+        RECURSIVE example 
+        <http://www.postgresql.org/docs/8.4/static/queries-with.html>`_.
+        Note that, in this example, the ``included_parts`` cte and the 
+        ``incl_alias`` alias of it are Core selectables, which
+        means the columns are accessed via the ``.c.`` attribute.  The 
+        ``parts_alias`` object is an :func:`.orm.aliased` instance of the 
+        ``Part`` entity, so column-mapped attributes are available 
+        directly::
 
             from sqlalchemy.orm import aliased
 
                 quantity = Column(Integer)
 
             included_parts = session.query(
-                                Part.sub_part, 
-                                Part.part, 
-                                Part.quantity).\\
-                                    filter(Part.part=="our part").\\
-                                    cte(name="included_parts", recursive=True)
+                            Part.sub_part, 
+                            Part.part, 
+                            Part.quantity).\\
+                                filter(Part.part=="our part").\\
+                                cte(name="included_parts", recursive=True)
 
             incl_alias = aliased(included_parts, name="pr")
             parts_alias = aliased(Part, name="p")
 
             q = session.query(
                     included_parts.c.sub_part,
-                    func.sum(included_parts.c.quantity).label('total_quantity')
+                    func.sum(included_parts.c.quantity).
+                        label('total_quantity')
                 ).\\
                 group_by(included_parts.c.sub_part)
 
         :meth:`._SelectBase.cte`
 
         """
-        return self.enable_eagerloads(False).statement.cte(name=name, recursive=recursive)
+        return self.enable_eagerloads(False).\
+            statement.cte(name=name, recursive=recursive)
 
     def label(self, name):
-        """Return the full SELECT statement represented by this :class:`.Query`, converted 
+        """Return the full SELECT statement represented by this 
+        :class:`.Query`, converted 
         to a scalar subquery with a label of the given name.
 
         Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.label`.
                 not mapper.always_refresh and \
                 self._lockmode is None:
 
-            instance = self._get_from_identity(self.session, key, attributes.PASSIVE_OFF)
+            instance = loading.get_from_identity(self.session, key, attributes.PASSIVE_OFF)
             if instance is not None:
                 # reject calls for id in identity map but class
                 # mismatch.
                     return None
                 return instance
 
-        return self._load_on_ident(key)
+        return loading.load_on_ident(self, key)
 
     @_generative()
     def correlate(self, *args):
             for u in session.query(User).instances(result):
                 print u
         """
-        session = self.session
-
         context = __context
         if context is None:
             context = QueryContext(self)
 
-        context.runid = _new_runid()
-
-        filter_fns = [ent.filter_fn
-                    for ent in self._entities]
-        filtered = id in filter_fns
-
-        single_entity = filtered and len(self._entities) == 1
-
-        if filtered:
-            if single_entity:
-                filter_fn = id
-            else:
-                def filter_fn(row):
-                    return tuple(fn(x) for x, fn in zip(row, filter_fns))
-
-        custom_rows = single_entity and \
-                        self._entities[0].mapper.dispatch.append_result
-
-        (process, labels) = \
-                    zip(*[
-                        query_entity.row_processor(self, context, custom_rows)
-                        for query_entity in self._entities
-                    ])
-
-
-        while True:
-            context.progress = {}
-            context.partials = {}
-
-            if self._yield_per:
-                fetch = cursor.fetchmany(self._yield_per)
-                if not fetch:
-                    break
-            else:
-                fetch = cursor.fetchall()
-
-            if custom_rows:
-                rows = []
-                for row in fetch:
-                    process[0](row, rows)
-            elif single_entity:
-                rows = [process[0](row, None) for row in fetch]
-            else:
-                rows = [util.NamedTuple([proc(row, None) for proc in process],
-                                        labels) for row in fetch]
-
-            if filtered:
-                rows = util.unique_list(rows, filter_fn)
-
-            if context.refresh_state and self._only_load_props \
-                        and context.refresh_state in context.progress:
-                context.refresh_state.commit(
-                        context.refresh_state.dict, self._only_load_props)
-                context.progress.pop(context.refresh_state)
-
-            session._finalize_loaded(context.progress)
-
-            for ii, (dict_, attrs) in context.partials.iteritems():
-                ii.commit(dict_, attrs)
-
-            for row in rows:
-                yield row